Compare commits
No commits in common. "c08bc7586abf2c38a35d487c6d4abca5dfe511bc" and "3da2fe8b22579f5bab834f5a2a14a45ce1d1ca3a" have entirely different histories.
c08bc7586a
...
3da2fe8b22
11 changed files with 372 additions and 941 deletions
4
.gitignore
vendored
4
.gitignore
vendored
|
@ -1,7 +1,3 @@
|
|||
/mozilla-central
|
||||
/patches
|
||||
/packages.json
|
||||
/packages_temp.json
|
||||
/APKINDEX
|
||||
/APPINDEX
|
||||
/browser-amd64-nightly-linux.tar.gz
|
101
README.md
101
README.md
|
@ -1,95 +1,50 @@
|
|||
<p align="center">
|
||||
<img src="https://weforgecode.xyz/Spitfire/Branding/raw/branch/main/icon5.svg" alt="Logo" width="64" height="64">
|
||||
</p>
|
||||
# Spitfire Builder
|
||||
|
||||
<p align="center" style="font-size: 32px;">
|
||||
<strong>Spitfire Builder</strong>
|
||||
</p>
|
||||
|
||||
<p align="center">
|
||||
This is a "simple" script for building the Spitfire Browser based on Mozilla Firefox source code.
|
||||
</p>
|
||||
This is a simple bash script for building the Spitfire Browser based on Mozilla Firefox source code.
|
||||
|
||||
## Dependencies
|
||||
|
||||
- Mercurial (hg)
|
||||
- Git
|
||||
- Golang (tested with v1.21)
|
||||
|
||||
# Example usage:
|
||||
## Usage
|
||||
|
||||
## Build:
|
||||
Run the script `builder.sh` with the following options:
|
||||
|
||||
```sh
|
||||
go run . -a
|
||||
- `-a, --all` : Perform all steps (build, clean, update).
|
||||
- `-b, --build` : Build Spitfire.
|
||||
- `-c, --clean` : Clean build.
|
||||
- `-u, --update` : Update Mozilla repository.
|
||||
- `-p, --patches` : Update patches.
|
||||
- `-r, --run` : Run the project after build using mach run in the browser directory
|
||||
- `-h, --help` : Display usage instructions.
|
||||
|
||||
## For example:
|
||||
|
||||
```bash
|
||||
./builder.sh --all
|
||||
```
|
||||
|
||||
## Upload:
|
||||
# Repositary
|
||||
|
||||
```sh
|
||||
go run . --upload -c --upload-path=./mozilla-central/obj-x86_64-pc-linux-gnu/dist/bin
|
||||
Optinally script also contains code to upload builds to sourceforge, but this can be acessed only with auth key.
|
||||
|
||||
```bash
|
||||
./upload.sh
|
||||
```
|
||||
|
||||
## Build and upload:
|
||||
|
||||
```sh
|
||||
go run . --upload -c --upload-path=./mozilla-central/obj-x86_64-pc-linux-gnu/dist/bin -a
|
||||
```
|
||||
|
||||
## Display all flags:
|
||||
|
||||
```sh
|
||||
go run . -h
|
||||
```
|
||||
|
||||
### Config file for uploading example:
|
||||
|
||||
*sourceforge_config.json*
|
||||
|
||||
```json
|
||||
{
|
||||
"SFKeyPath": "~/.ssh/id_rsa.pub",
|
||||
"SFUser": "internet-addict",
|
||||
"SFHost": "web.sourceforge.net",
|
||||
"SFProject": "spitfire-browser"
|
||||
}
|
||||
```
|
||||
|
||||
## APPINDEX example:
|
||||
|
||||
```
|
||||
C:905cd0cc2dea9e400e1ecd099462b6b19188a9f1
|
||||
P:Spitfire
|
||||
R:nightly
|
||||
V:2024.09.08
|
||||
A:amd64
|
||||
S:788506622
|
||||
I:3324483350
|
||||
T:Spitfire build
|
||||
U:https://spitfirebrowser.com/
|
||||
L:AGPL-3.0
|
||||
o:browser
|
||||
m:Internet Addict
|
||||
t:1725830641
|
||||
c:905cd0cc2dea9e400e1ecd099462b6b19188a9f1
|
||||
D:
|
||||
p:linux
|
||||
q:
|
||||
Z:905cd0cc2dea9e400e1ecd099462b6b19188a9f1
|
||||
```
|
||||
|
||||
## Repositary structure
|
||||
## Structure
|
||||
|
||||
```
|
||||
spitfire-browser/
|
||||
├── browser/
|
||||
│ ├── amd64/
|
||||
│ ├── x86_64/
|
||||
│ │ ├── stable/
|
||||
│ │ │ ├── latest/deb.tar.gz
|
||||
│ │ │ ├── x.x.x/deb.tar.gz
|
||||
│ │ │ ├── latest/linux.tar.gz
|
||||
│ │ │ ├── x.x.x/linux.tar.gz
|
||||
│ │ ├── nightly/
|
||||
│ │ ├── latest/deb.tar.gz
|
||||
│ │ ├── yyyy-mm-dd/deb.tar.gz
|
||||
│ │ ├── latest/linux.tar.gz
|
||||
│ │ ├── yyyy-mm-dd/linux.tar.gz
|
||||
│ ├── arm/
|
||||
│ │ ├── stable/
|
||||
│ │ │ ├── latest/
|
||||
|
@ -140,5 +95,5 @@ spitfire-browser/
|
|||
│ ├── nightly/
|
||||
│ ├── latest/
|
||||
│ ├── yyyy-mm-dd/
|
||||
├── APPINDEX
|
||||
├── packages.json
|
||||
```
|
184
builder.sh
Executable file
184
builder.sh
Executable file
|
@ -0,0 +1,184 @@
|
|||
#!/bin/bash
|
||||
|
||||
# Define source paths
|
||||
SOURCE_PATH="./mozilla-central"
|
||||
PATCHES_DIR="$SOURCE_PATH/Spitfire"
|
||||
SOURCE_REPO="https://hg.mozilla.org/mozilla-central"
|
||||
PATCHES_REPO="https://weforgecode.xyz/Spitfire/Browser.git"
|
||||
|
||||
SOURCE_PATH=$(realpath "$SOURCE_PATH") || { echo "Failed to resolve SOURCE_PATH. Exiting."; exit 1; }
|
||||
PATCHES_DIR=$(realpath "$PATCHES_DIR") || { echo "Failed to resolve PATCHES_DIR. Exiting."; exit 1; }
|
||||
|
||||
# Function to download Mozilla source if not present
|
||||
download_source() {
|
||||
if [ ! -d "$SOURCE_PATH" ]; then
|
||||
echo "Mozilla source not found. Cloning from repository..."
|
||||
hg clone "$SOURCE_REPO" "$SOURCE_PATH" || { echo "Failed to clone Mozilla repository. Exiting."; exit 1; }
|
||||
else
|
||||
echo "Mozilla source already exists."
|
||||
fi
|
||||
}
|
||||
|
||||
# Function to discard uncommitted changes
|
||||
discard_changes() {
|
||||
echo "Discarding uncommitted changes..."
|
||||
hg revert --all --no-backup -R "$SOURCE_PATH" || { echo "Failed to revert changes in Mozilla repository. Exiting."; exit 1; }
|
||||
}
|
||||
|
||||
# Function to clean build
|
||||
clean_build() {
|
||||
echo "Cleaning build..."
|
||||
cd "$SOURCE_PATH" || { echo "Failed to navigate to source directory. Exiting."; exit 1; }
|
||||
hg revert --all --no-backup || { echo "Failed to revert changes in Mozilla repository. Exiting."; exit 1; }
|
||||
./mach clobber || { echo "Failed to clean build. Exiting."; exit 1; }
|
||||
}
|
||||
|
||||
# Function to update Mozilla repository
|
||||
update_repo() {
|
||||
echo "Updating Mozilla repository..."
|
||||
cd "$SOURCE_PATH" || { echo "Failed to navigate to source directory. Exiting."; exit 1; }
|
||||
hg pull -u || { echo "Failed to update Mozilla repository. Exiting."; exit 1; }
|
||||
}
|
||||
|
||||
# Function to update patches
|
||||
update_patches() {
|
||||
echo "Updating patches..."
|
||||
if [ -d "$PATCHES_DIR" ]; then
|
||||
echo "Patches directory already exists. Cleaning and pulling updates..."
|
||||
cd "$PATCHES_DIR" || { echo "Failed to navigate to patches directory. Exiting."; exit 1; }
|
||||
git clean -xdf || { echo "Failed to clean patches directory. Exiting."; exit 1; }
|
||||
|
||||
# Stash any local changes to ensure a clean rebase
|
||||
git stash push --include-untracked || { echo "Failed to stash local changes. Exiting."; exit 1; }
|
||||
|
||||
# Fetching all branches
|
||||
git fetch || { echo "Failed to fetch updates from patches repository. Exiting."; exit 1; }
|
||||
|
||||
# Trying to rebase onto 'main' branch
|
||||
if git show-ref --verify --quiet refs/heads/main; then
|
||||
git rebase origin/main || { echo "Failed to rebase updates from main branch. Exiting."; exit 1; }
|
||||
elif git show-ref --verify --quiet refs/heads/master; then
|
||||
# Fallback to 'master' branch if 'main' does not exist
|
||||
git rebase origin/master || { echo "Failed to rebase updates from master branch. Exiting."; exit 1; }
|
||||
else
|
||||
echo "No valid branch (main or master) found in patches repository. Exiting."
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Drop stashed changes to discard local modifications
|
||||
git stash drop || { echo "Failed to drop stashed changes. Exiting."; exit 1; }
|
||||
else
|
||||
echo "Patches directory does not exist. Cloning repository..."
|
||||
git clone "$PATCHES_REPO" "$PATCHES_DIR" || { echo "Failed to clone patches repository. Exiting."; exit 1; }
|
||||
fi
|
||||
|
||||
echo "Copying files from patches directory to Firefox source directory..."
|
||||
rsync -av --exclude='.git' "$PATCHES_DIR/" "$SOURCE_PATH/" || { echo "Failed to copy files. Exiting."; exit 1; }
|
||||
}
|
||||
|
||||
# Function to configure Spitfire
|
||||
configure() {
|
||||
echo "Configuring Spitfire..."
|
||||
cd "$SOURCE_PATH" || { echo "Failed to navigate to source directory. Exiting."; exit 1; }
|
||||
./mach configure || { echo "Configuration failed. Exiting."; exit 1; }
|
||||
}
|
||||
|
||||
# Function to build Spitfire
|
||||
build() {
|
||||
echo "Building Spitfire..."
|
||||
cd "$SOURCE_PATH" || { echo "Failed to navigate to source directory. Exiting."; exit 1; }
|
||||
./mach build || { echo "Build failed. Exiting."; exit 1; }
|
||||
}
|
||||
|
||||
# Function to run the project after build
|
||||
run_project() {
|
||||
echo "Running the project..."
|
||||
cd "$SOURCE_PATH" || { echo "Failed to navigate to browser directory. Exiting."; exit 1; }
|
||||
./mach run || { echo "Failed to run the project. Exiting."; exit 1; }
|
||||
}
|
||||
|
||||
# Function to print usage instructions
|
||||
print_help() {
|
||||
echo "Usage: ./builder.sh [options]"
|
||||
echo "Options:"
|
||||
echo " -a, --all : Perform all steps (build, clean, update)"
|
||||
echo " -b, --build : Build Spitfire"
|
||||
echo " -c, --clean : Clean build"
|
||||
echo " -u, --update : Update Mozilla repository"
|
||||
echo " -p, --patches : Update patches"
|
||||
echo " -r, --run : Run the project after build using mach run in the browser directory"
|
||||
echo " -h, --help : Display this help message"
|
||||
exit 0
|
||||
}
|
||||
|
||||
# Parse command line arguments
|
||||
while [[ $# -gt 0 ]]; do
|
||||
key="$1"
|
||||
case $key in
|
||||
-a|--all)
|
||||
all=true
|
||||
;;
|
||||
-b|--build)
|
||||
build=true
|
||||
;;
|
||||
-c|--clean)
|
||||
clean=true
|
||||
;;
|
||||
-u|--update)
|
||||
update=true
|
||||
;;
|
||||
-p|--patches)
|
||||
patches=true
|
||||
;;
|
||||
-r|--run)
|
||||
run=true
|
||||
;;
|
||||
-h|--help)
|
||||
print_help
|
||||
;;
|
||||
*)
|
||||
echo "Invalid option: $key"
|
||||
print_help
|
||||
;;
|
||||
esac
|
||||
shift
|
||||
done
|
||||
|
||||
# Main script execution based on flags
|
||||
if [ "$all" = true ]; then
|
||||
download_source
|
||||
discard_changes
|
||||
clean_build
|
||||
update_repo
|
||||
update_patches
|
||||
configure
|
||||
build
|
||||
if [ "$run" = true ]; then
|
||||
run_project
|
||||
fi
|
||||
echo "Spitfire build completed successfully."
|
||||
elif [ "$build" = true ]; then
|
||||
configure
|
||||
build
|
||||
if [ "$run" = true ]; then
|
||||
run_project
|
||||
fi
|
||||
echo "Spitfire build completed successfully."
|
||||
elif [ "$clean" = true ]; then
|
||||
clean_build
|
||||
echo "Cleaned Firefox build."
|
||||
elif [ "$update" = true ]; then
|
||||
download_source
|
||||
update_repo
|
||||
echo "Mozilla repository updated."
|
||||
elif [ "$patches" = true ]; then
|
||||
download_source
|
||||
update_patches
|
||||
echo "Patches updated."
|
||||
elif [ "$run" = true ]; then
|
||||
run_project
|
||||
else
|
||||
print_help
|
||||
fi
|
||||
|
||||
exit 0
|
3
go.mod
3
go.mod
|
@ -1,3 +0,0 @@
|
|||
module spitfire
|
||||
|
||||
go 1.18
|
260
main.go
260
main.go
|
@ -1,260 +0,0 @@
|
|||
package main
|
||||
|
||||
import (
|
||||
"flag"
|
||||
"fmt"
|
||||
"log"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"runtime" // for detecting system architecture and platform
|
||||
"time"
|
||||
"spitfire/spitfire"
|
||||
//"errors"
|
||||
)
|
||||
|
||||
var (
|
||||
// Define all flags as package-level variables
|
||||
all bool
|
||||
buildFlag bool
|
||||
clean bool
|
||||
update bool
|
||||
patches bool
|
||||
run bool
|
||||
compress bool
|
||||
buildPath string
|
||||
target string
|
||||
version string
|
||||
component string
|
||||
arch string
|
||||
release string
|
||||
platform string
|
||||
upload bool
|
||||
uploadPath string
|
||||
sourceRepo = "https://hg.mozilla.org/mozilla-central"
|
||||
patchesRepo = "https://weforgecode.xyz/Spitfire/Browser.git"
|
||||
url = "https://spitfirebrowser.com/"
|
||||
licence = "AGPL-3.0"
|
||||
name = "Spitfire"
|
||||
maintainer = "Internet Addict"
|
||||
initialDir string
|
||||
)
|
||||
|
||||
func init() {
|
||||
flag.StringVar(&buildPath, "p", "", "Path to the build directory")
|
||||
flag.StringVar(&target, "t", "", "Target location format: component-arch-release-platform")
|
||||
flag.BoolVar(&compress, "c", false, "Compress the build directory into a tar.gz file before uploading")
|
||||
flag.StringVar(&version, "v", "", "Specify version for the package. For nightly, use current date if not specified.")
|
||||
flag.StringVar(&component, "component", "browser", "Component name (default: browser)")
|
||||
flag.StringVar(&arch, "arch", runtime.GOARCH, "Architecture (default: system architecture)")
|
||||
flag.StringVar(&release, "release", "nightly", "Release type (default: nightly)")
|
||||
flag.StringVar(&platform, "platform", runtime.GOOS, "Platform (default: system platform)")
|
||||
flag.BoolVar(&all, "a", false, "Perform all steps (build, clean, update)")
|
||||
flag.BoolVar(&buildFlag, "b", false, "Build Spitfire")
|
||||
flag.BoolVar(&clean, "clean", false, "Clean build")
|
||||
flag.BoolVar(&update, "u", false, "Update Mozilla repository")
|
||||
flag.BoolVar(&patches, "patches", false, "Update patches")
|
||||
flag.BoolVar(&run, "r", false, "Run the project after build")
|
||||
flag.BoolVar(&upload, "upload", false, "Upload the compressed build file to SourceForge")
|
||||
flag.StringVar(&uploadPath, "upload-path", "", "Path to the file to upload if no build present")
|
||||
flag.Bool("h", false, "Display help message")
|
||||
}
|
||||
|
||||
func printHelp() {
|
||||
fmt.Println("Usage: ./main -p=<path-to-build> -t=<target> [-c|--compress] [-v|--version=<version>] [-component=<component>] [-arch=<architecture>] [-release=<release>] [-platform=<platform>]")
|
||||
flag.PrintDefaults()
|
||||
fmt.Println("Example: go run . --upload -c --upload-path=./mozilla-central/obj-x86_64-pc-linux-gnu/dist/bin -a")
|
||||
os.Exit(0)
|
||||
}
|
||||
|
||||
func main() {
|
||||
flag.Parse()
|
||||
|
||||
if flag.Lookup("h").Value.(flag.Getter).Get().(bool) {
|
||||
printHelp()
|
||||
}
|
||||
|
||||
// Set version to current date if it's empty and release is nightly
|
||||
if version == "" && release == "nightly" {
|
||||
version = time.Now().Format("2006.01.02") // Set version to current date if nightly
|
||||
}
|
||||
|
||||
// Save the initial directory
|
||||
var err error
|
||||
initialDir, err = os.Getwd()
|
||||
if err != nil {
|
||||
log.Fatalf("Failed to get current working directory: %v", err)
|
||||
}
|
||||
fmt.Printf("Initial working directory: %s\n", initialDir)
|
||||
|
||||
if all || buildFlag {
|
||||
BuildProcess()
|
||||
}
|
||||
|
||||
if compress || upload {
|
||||
PackageAndUploadProcess()
|
||||
}
|
||||
|
||||
spitfire.PrintErrors()
|
||||
}
|
||||
|
||||
// BuildProcess handles the build process: downloading, cleaning, configuring, and building the project.
|
||||
func BuildProcess() {
|
||||
sourcePath, err := spitfire.ResolvePath("./mozilla-central")
|
||||
if err != nil {
|
||||
log.Fatalf("Error resolving source path: %v", err)
|
||||
}
|
||||
|
||||
patchesDir, err := spitfire.ResolvePath(filepath.Join(sourcePath, "Spitfire"))
|
||||
if err != nil {
|
||||
log.Fatalf("Error resolving patches directory: %v", err)
|
||||
}
|
||||
|
||||
if all {
|
||||
spitfire.DownloadSource(sourcePath, sourceRepo)
|
||||
spitfire.DiscardChanges(sourcePath)
|
||||
spitfire.CleanBuild(sourcePath)
|
||||
spitfire.UpdateRepo(sourcePath)
|
||||
spitfire.UpdatePatches(patchesDir, patchesRepo, sourcePath)
|
||||
spitfire.Configure(sourcePath)
|
||||
spitfire.Build(sourcePath)
|
||||
if run {
|
||||
spitfire.RunProject(sourcePath)
|
||||
}
|
||||
fmt.Println("Spitfire build completed successfully.")
|
||||
} else if clean {
|
||||
spitfire.CleanBuild(sourcePath)
|
||||
fmt.Println("Cleaned Firefox build.")
|
||||
} else if update {
|
||||
spitfire.DownloadSource(sourcePath, sourceRepo)
|
||||
spitfire.UpdateRepo(sourcePath)
|
||||
fmt.Println("Mozilla repository updated.")
|
||||
} else if patches {
|
||||
spitfire.DownloadSource(sourcePath, sourceRepo)
|
||||
spitfire.UpdatePatches(patchesDir, patchesRepo, sourcePath)
|
||||
fmt.Println("Patches updated.")
|
||||
} else if buildFlag {
|
||||
spitfire.Configure(sourcePath)
|
||||
spitfire.Build(sourcePath)
|
||||
if run {
|
||||
spitfire.RunProject(sourcePath)
|
||||
}
|
||||
fmt.Println("Spitfire build completed successfully.")
|
||||
}
|
||||
}
|
||||
|
||||
// PackageAndUploadProcess handles compressing, packaging, and uploading the build to SourceForge.
|
||||
func PackageAndUploadProcess() {
|
||||
|
||||
// Restore working directory before performing SourceForge operations
|
||||
restoreWorkingDirectory()
|
||||
|
||||
pathToUse := buildPath
|
||||
if upload && uploadPath != "" {
|
||||
pathToUse = uploadPath
|
||||
}
|
||||
|
||||
if pathToUse == "" {
|
||||
log.Fatalf("Error: no valid build or upload path provided.")
|
||||
}
|
||||
|
||||
// // This is stupid, it wait for the path to exist (up to a maximum wait time)
|
||||
// err := waitForPath(pathToUse, 60, 5) // Max 60 seconds, checking every 5 seconds
|
||||
// if err != nil {
|
||||
// log.Fatalf("Error: Build path or upload path not found: %v", err)
|
||||
// }
|
||||
|
||||
uncompressedSize, err := spitfire.GetDirectorySize(pathToUse)
|
||||
if err != nil {
|
||||
log.Fatalf("Failed to calculate uncompressed size: %v", err)
|
||||
}
|
||||
fmt.Printf("Uncompressed directory size: %d bytes\n", uncompressedSize)
|
||||
|
||||
outputCompressedFile := filepath.Join(".", fmt.Sprintf("%s-%s-%s-%s.tar.gz", component, arch, release, platform))
|
||||
if compress {
|
||||
err := spitfire.CompressDirectory(pathToUse, outputCompressedFile)
|
||||
if err != nil {
|
||||
log.Fatalf("Failed to compress build directory: %v", err)
|
||||
}
|
||||
fmt.Printf("Build directory compressed to: %s\n", outputCompressedFile)
|
||||
}
|
||||
|
||||
compressedSize, err := spitfire.GetFileSize(outputCompressedFile)
|
||||
if err != nil {
|
||||
log.Fatalf("Failed to get compressed file size: %v", err)
|
||||
}
|
||||
fmt.Printf("Compressed file size: %d bytes\n", compressedSize)
|
||||
|
||||
if upload {
|
||||
config, err := spitfire.LoadConfig()
|
||||
if err != nil {
|
||||
log.Fatalf("Failed to load SourceForge config: %v", err)
|
||||
}
|
||||
|
||||
if _, err := os.Stat(outputCompressedFile); err == nil {
|
||||
err = spitfire.Upload(config, outputCompressedFile, "/home/frs/project/spitfire-browser/"+component+"/"+arch+"/"+release+"/"+version+"/")
|
||||
if err != nil {
|
||||
log.Fatalf("Failed to upload compressed file: %v", err)
|
||||
}
|
||||
fmt.Println("Compressed file uploaded successfully.")
|
||||
} else {
|
||||
log.Fatalf("No compressed file found to upload.")
|
||||
}
|
||||
|
||||
err = spitfire.DownloadAPPINDEX(config, "/home/frs/project/spitfire-browser/")
|
||||
if err != nil {
|
||||
fmt.Println("Failed to download APPINDEX. A new APPINDEX will be created and uploaded.")
|
||||
}
|
||||
|
||||
err = spitfire.PackageAPPINDEX(
|
||||
name, release, version, arch,
|
||||
fmt.Sprintf("%d", compressedSize),
|
||||
fmt.Sprintf("%d", uncompressedSize),
|
||||
"Spitfire build", url, licence, component, maintainer, "", platform,
|
||||
)
|
||||
if err != nil {
|
||||
log.Fatalf("Failed to update APPINDEX: %v", err)
|
||||
}
|
||||
fmt.Println("APPINDEX updated successfully.")
|
||||
|
||||
if err := spitfire.CleanAppIndex(); err != nil {
|
||||
log.Fatalf("Failed to clean APPINDEX: %v", err)
|
||||
}
|
||||
|
||||
err = spitfire.UploadAPPINDEX(config, "/home/frs/project/spitfire-browser/")
|
||||
if err != nil {
|
||||
log.Fatalf("Failed to upload updated APPINDEX: %v", err)
|
||||
}
|
||||
fmt.Println("APPINDEX uploaded successfully.")
|
||||
}
|
||||
}
|
||||
|
||||
// // waitForPath checks if a path exists, waiting for up to maxWait seconds and retrying every interval seconds.
|
||||
// func waitForPath(path string, maxWait int, interval int) error {
|
||||
// waited := 0
|
||||
// for {
|
||||
// if PathExists(path) {
|
||||
// return nil // Path exists
|
||||
// }
|
||||
// if waited >= maxWait {
|
||||
// return errors.New("path does not exist after waiting")
|
||||
// }
|
||||
// fmt.Printf("Waiting for path %s to exist...\n", path)
|
||||
// time.Sleep(time.Duration(interval) * time.Second)
|
||||
// waited += interval
|
||||
// }
|
||||
// }
|
||||
|
||||
// // PathExists checks if the path exists
|
||||
// func PathExists(path string) bool {
|
||||
// _, err := os.Stat(path)
|
||||
// return !os.IsNotExist(err)
|
||||
// }
|
||||
|
||||
// restoreWorkingDirectory restores the initial working directory after any operation that might change it.
|
||||
func restoreWorkingDirectory() {
|
||||
err := os.Chdir(initialDir)
|
||||
if err != nil {
|
||||
log.Fatalf("Failed to restore the working directory: %v", err)
|
||||
}
|
||||
fmt.Printf("Restored working directory to: %s\n", initialDir)
|
||||
}
|
|
@ -1,7 +0,0 @@
|
|||
{
|
||||
"SFKeyPath": "~/.ssh/id_rsa.pub",
|
||||
"SFUser": "internet-addict",
|
||||
"SFHost": "web.sourceforge.net",
|
||||
"SFProject": "spitfire-browser"
|
||||
}
|
||||
|
5
sourceforge_config.sh
Executable file
5
sourceforge_config.sh
Executable file
|
@ -0,0 +1,5 @@
|
|||
# sourceforge_config.sh
|
||||
SF_USER="internet-addict"
|
||||
SF_PROJECT="spitfire-browser"
|
||||
SF_HOST="frs.sourceforge.net"
|
||||
SF_KEY_PATH="$HOME/.ssh/id_rsa" # Path to your SSH private key for SourceForge
|
|
@ -1,181 +0,0 @@
|
|||
package spitfire
|
||||
|
||||
import (
|
||||
"crypto/sha1"
|
||||
"fmt"
|
||||
"io"
|
||||
"log"
|
||||
"os"
|
||||
"strings"
|
||||
"time"
|
||||
)
|
||||
|
||||
// Package the APPINDEX update process
|
||||
func PackageAPPINDEX(name, release, version, arch, size, installedSize, description, url, license, origin, maintainer, dependencies, platform string) error {
|
||||
// Mock package file name
|
||||
pkgFile := fmt.Sprintf("%s-%s", name, version)
|
||||
|
||||
// Calculate checksums
|
||||
checksum := calcChecksum(pkgFile)
|
||||
contentChecksum := calcChecksum(pkgFile)
|
||||
|
||||
// Timestamp
|
||||
timestamp := time.Now().Unix()
|
||||
|
||||
// Remove existing entry based on P, R, A, and o fields
|
||||
removeExistingEntry(name, release, arch, origin)
|
||||
|
||||
// Open or create the APPINDEX file for appending
|
||||
file, err := os.OpenFile("./APPINDEX", os.O_APPEND|os.O_CREATE|os.O_WRONLY, 0644)
|
||||
if err != nil {
|
||||
log.Fatalf("Failed to open APPINDEX file: %v", err)
|
||||
}
|
||||
defer file.Close()
|
||||
|
||||
// Write the new entry
|
||||
entry := fmt.Sprintf(`
|
||||
C:%s
|
||||
P:%s
|
||||
R:%s
|
||||
V:%s
|
||||
A:%s
|
||||
S:%s
|
||||
I:%s
|
||||
T:%s
|
||||
U:%s
|
||||
L:%s
|
||||
o:%s
|
||||
m:%s
|
||||
t:%d
|
||||
c:%s
|
||||
D:%s
|
||||
p:%s
|
||||
q:
|
||||
Z:%s
|
||||
|
||||
`, checksum, name, release, version, arch, size, installedSize, description, url, license, origin, maintainer, timestamp, contentChecksum, dependencies, platform, checksum)
|
||||
|
||||
if _, err := file.WriteString(entry); err != nil {
|
||||
log.Fatalf("Failed to write to APPINDEX file: %v", err)
|
||||
}
|
||||
|
||||
fmt.Println("APPINDEX has been updated successfully.")
|
||||
return nil
|
||||
}
|
||||
|
||||
// calcChecksum calculates a checksum (SHA-1) for a given input string
|
||||
func calcChecksum(input string) string {
|
||||
h := sha1.New()
|
||||
_, _ = io.WriteString(h, input)
|
||||
return fmt.Sprintf("%x", h.Sum(nil))
|
||||
}
|
||||
|
||||
// removeExistingEntry removes an existing entry from APPINDEX based on P, R, A, and o fields
|
||||
func removeExistingEntry(name, release, arch, origin string) {
|
||||
// Read file contents
|
||||
content, err := os.ReadFile("./APPINDEX")
|
||||
if err != nil {
|
||||
if os.IsNotExist(err) {
|
||||
return // If file does not exist, no need to remove anything
|
||||
}
|
||||
log.Fatalf("Failed to read APPINDEX: %v", err)
|
||||
}
|
||||
|
||||
// Remove lines matching the package with the same P, R, A, and o fields
|
||||
lines := strings.Split(string(content), "\n")
|
||||
var newLines []string
|
||||
remove := false
|
||||
for _, line := range lines {
|
||||
// Detect start of an entry by matching on P, R, A, and o
|
||||
if strings.HasPrefix(line, "P:"+name) {
|
||||
remove = true
|
||||
}
|
||||
if remove && strings.HasPrefix(line, "R:"+release) {
|
||||
remove = true
|
||||
}
|
||||
if remove && strings.HasPrefix(line, "A:"+arch) {
|
||||
remove = true
|
||||
}
|
||||
if remove && strings.HasPrefix(line, "o:"+origin) {
|
||||
remove = true
|
||||
}
|
||||
|
||||
// Stop removal at the end of an entry
|
||||
if remove && line == "" {
|
||||
remove = false
|
||||
continue // Skip the line
|
||||
}
|
||||
|
||||
// Append lines that are not part of the matching entry
|
||||
if !remove {
|
||||
newLines = append(newLines, line)
|
||||
}
|
||||
}
|
||||
|
||||
// Write the updated contents back to the file
|
||||
err = os.WriteFile("./APPINDEX", []byte(strings.Join(newLines, "\n")), 0644)
|
||||
if err != nil {
|
||||
log.Fatalf("Failed to update APPINDEX: %v", err)
|
||||
}
|
||||
}
|
||||
|
||||
// CleanAppIndex cleans up any orphaned "C:" entries and collapses excessive newlines
|
||||
func CleanAppIndex() error {
|
||||
// Read file contents
|
||||
content, err := os.ReadFile("./APPINDEX")
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to read APPINDEX: %v", err)
|
||||
}
|
||||
|
||||
// Split the file content into lines
|
||||
lines := strings.Split(string(content), "\n")
|
||||
var newLines []string
|
||||
var currentEntry []string
|
||||
inEntry := false
|
||||
|
||||
for _, line := range lines {
|
||||
line = strings.TrimSpace(line)
|
||||
|
||||
// Start of an entry when we encounter a checksum
|
||||
if strings.HasPrefix(line, "C:") {
|
||||
// If we already have a valid entry, add it to newLines
|
||||
if inEntry && len(currentEntry) > 1 {
|
||||
newLines = append(newLines, currentEntry...)
|
||||
}
|
||||
currentEntry = []string{line}
|
||||
inEntry = true
|
||||
} else if inEntry && line == "" {
|
||||
// End of an entry
|
||||
if len(currentEntry) > 1 {
|
||||
newLines = append(newLines, currentEntry...)
|
||||
newLines = append(newLines, "") // Add a blank line to separate entries
|
||||
}
|
||||
currentEntry = nil
|
||||
inEntry = false
|
||||
} else if inEntry {
|
||||
// Continue adding lines to the current entry
|
||||
currentEntry = append(currentEntry, line)
|
||||
} else if line != "" {
|
||||
// Add non-entry lines (for extra safety)
|
||||
newLines = append(newLines, line)
|
||||
}
|
||||
}
|
||||
|
||||
// In case the last entry was valid
|
||||
if inEntry && len(currentEntry) > 1 {
|
||||
newLines = append(newLines, currentEntry...)
|
||||
}
|
||||
|
||||
// Collapse consecutive blank lines
|
||||
cleanedContent := strings.Join(newLines, "\n")
|
||||
cleanedContent = strings.ReplaceAll(cleanedContent, "\n\n\n", "\n\n")
|
||||
|
||||
// Write the cleaned content back to the file
|
||||
err = os.WriteFile("./APPINDEX", []byte(cleanedContent), 0644)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to write cleaned APPINDEX: %v", err)
|
||||
}
|
||||
|
||||
fmt.Println("APPINDEX cleaned successfully.")
|
||||
return nil
|
||||
}
|
|
@ -1,166 +0,0 @@
|
|||
package spitfire
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"os"
|
||||
"os/exec"
|
||||
"path/filepath"
|
||||
)
|
||||
|
||||
// Array to store errors
|
||||
var errors []string
|
||||
|
||||
// Run an external command like scp or rsync
|
||||
func runCommand(command string, args ...string) error {
|
||||
cmd := exec.Command(command, args...)
|
||||
cmd.Stdout = os.Stdout
|
||||
cmd.Stderr = os.Stderr
|
||||
return cmd.Run()
|
||||
}
|
||||
|
||||
// Function to resolve paths using absolute path
|
||||
func ResolvePath(path string) (string, error) {
|
||||
absPath, err := filepath.Abs(path)
|
||||
if err != nil {
|
||||
return "", fmt.Errorf("failed to resolve path: %s", path)
|
||||
}
|
||||
return absPath, nil
|
||||
}
|
||||
|
||||
// Function to download Mozilla source if not present
|
||||
func DownloadSource(sourcePath string, sourceRepo string) {
|
||||
if _, err := os.Stat(sourcePath); os.IsNotExist(err) {
|
||||
fmt.Println("Mozilla source not found. Cloning from repository...")
|
||||
if err := runCommand("hg", "clone", sourceRepo, sourcePath); err != nil {
|
||||
errors = append(errors, "Failed to clone Mozilla repository.")
|
||||
}
|
||||
} else {
|
||||
fmt.Println("Mozilla source already exists.")
|
||||
}
|
||||
}
|
||||
|
||||
// Function to discard uncommitted changes
|
||||
func DiscardChanges(sourcePath string) {
|
||||
fmt.Println("Discarding uncommitted changes...")
|
||||
if err := runCommand("hg", "revert", "--all", "--no-backup", "-R", sourcePath); err != nil {
|
||||
errors = append(errors, "Failed to revert changes in Mozilla repository.")
|
||||
}
|
||||
}
|
||||
|
||||
// Function to clean build
|
||||
func CleanBuild(sourcePath string) {
|
||||
fmt.Println("Cleaning build...")
|
||||
if err := os.Chdir(sourcePath); err != nil {
|
||||
errors = append(errors, "Failed to navigate to source directory.")
|
||||
return
|
||||
}
|
||||
if err := runCommand("hg", "revert", "--all", "--no-backup"); err != nil {
|
||||
errors = append(errors, "Failed to revert changes in Mozilla repository.")
|
||||
}
|
||||
if err := runCommand("./mach", "clobber"); err != nil {
|
||||
errors = append(errors, "Failed to clean build.")
|
||||
}
|
||||
}
|
||||
|
||||
// Function to update Mozilla repository
|
||||
func UpdateRepo(sourcePath string) {
|
||||
fmt.Println("Updating Mozilla repository...")
|
||||
if err := os.Chdir(sourcePath); err != nil {
|
||||
errors = append(errors, "Failed to navigate to source directory.")
|
||||
return
|
||||
}
|
||||
if err := runCommand("hg", "pull", "-u"); err != nil {
|
||||
errors = append(errors, "Failed to update Mozilla repository.")
|
||||
}
|
||||
}
|
||||
|
||||
// Function to update patches
|
||||
func UpdatePatches(patchesDir, patchesRepo, sourcePath string) {
|
||||
fmt.Println("Updating patches...")
|
||||
if _, err := os.Stat(patchesDir); err == nil {
|
||||
fmt.Println("Patches directory already exists. Cleaning and pulling updates...")
|
||||
if err := os.Chdir(patchesDir); err != nil {
|
||||
errors = append(errors, "Failed to navigate to patches directory.")
|
||||
return
|
||||
}
|
||||
if err := runCommand("git", "clean", "-xdf"); err != nil {
|
||||
errors = append(errors, "Failed to clean patches directory.")
|
||||
}
|
||||
_ = runCommand("git", "stash", "push", "--include-untracked")
|
||||
if err := runCommand("git", "fetch"); err != nil {
|
||||
errors = append(errors, "Failed to fetch updates from patches repository.")
|
||||
}
|
||||
if runCommand("git", "show-ref", "--verify", "--quiet", "refs/heads/main") == nil {
|
||||
if err := runCommand("git", "rebase", "origin/main"); err != nil {
|
||||
errors = append(errors, "Failed to rebase updates from main branch.")
|
||||
}
|
||||
} else if runCommand("git", "show-ref", "--verify", "--quiet", "refs/heads/master") == nil {
|
||||
if err := runCommand("git", "rebase", "origin/master"); err != nil {
|
||||
errors = append(errors, "Failed to rebase updates from master branch.")
|
||||
}
|
||||
} else {
|
||||
errors = append(errors, "No valid branch (main or master) found in patches repository.")
|
||||
return
|
||||
}
|
||||
if runCommand("git", "stash", "list") == nil {
|
||||
_ = runCommand("git", "stash", "pop")
|
||||
} else {
|
||||
fmt.Println("No stash entries found, skipping pop.")
|
||||
}
|
||||
} else {
|
||||
fmt.Println("Patches directory does not exist. Cloning repository...")
|
||||
if err := runCommand("git", "clone", patchesRepo, patchesDir); err != nil {
|
||||
errors = append(errors, "Failed to clone patches repository.")
|
||||
}
|
||||
}
|
||||
fmt.Println("Copying files from patches directory to Firefox source directory...")
|
||||
if err := runCommand("rsync", "-av", "--exclude=.git", patchesDir+"/", sourcePath+"/"); err != nil {
|
||||
errors = append(errors, "Failed to copy files.")
|
||||
}
|
||||
}
|
||||
|
||||
// Function to configure Spitfire
|
||||
func Configure(sourcePath string) {
|
||||
fmt.Println("Configuring Spitfire...")
|
||||
if err := os.Chdir(sourcePath); err != nil {
|
||||
errors = append(errors, "Failed to navigate to source directory.")
|
||||
return
|
||||
}
|
||||
if err := runCommand("./mach", "configure"); err != nil {
|
||||
errors = append(errors, "Configuration failed.")
|
||||
}
|
||||
}
|
||||
|
||||
// Function to build Spitfire
|
||||
func Build(sourcePath string) {
|
||||
fmt.Println("Building Spitfire...")
|
||||
if err := os.Chdir(sourcePath); err != nil {
|
||||
errors = append(errors, "Failed to navigate to source directory.")
|
||||
return
|
||||
}
|
||||
if err := runCommand("./mach", "build"); err != nil {
|
||||
errors = append(errors, "Build failed.")
|
||||
}
|
||||
}
|
||||
|
||||
// Function to run the project after build
|
||||
func RunProject(sourcePath string) {
|
||||
fmt.Println("Running the project...")
|
||||
if err := os.Chdir(sourcePath); err != nil {
|
||||
errors = append(errors, "Failed to navigate to source directory.")
|
||||
return
|
||||
}
|
||||
if err := runCommand("./mach", "run"); err != nil {
|
||||
errors = append(errors, "Failed to run the project.")
|
||||
}
|
||||
}
|
||||
|
||||
// Function to print collected errors
|
||||
func PrintErrors() {
|
||||
if len(errors) > 0 {
|
||||
fmt.Println("The following errors occurred during execution:")
|
||||
for _, err := range errors {
|
||||
fmt.Printf("- %s\n", err)
|
||||
}
|
||||
}
|
||||
}
|
|
@ -1,246 +0,0 @@
|
|||
package spitfire
|
||||
|
||||
import (
|
||||
"archive/tar"
|
||||
"crypto/rand"
|
||||
"compress/gzip"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"io"
|
||||
"os"
|
||||
"os/exec"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
"encoding/hex"
|
||||
)
|
||||
|
||||
// Config struct to hold SourceForge configurations
// (connection/destination settings consumed by the scp/rsync helpers below).
type Config struct {
	SFKeyPath string // filesystem path to the SSH private key passed to scp via -i
	SFUser    string // SourceForge account name used in the user@host scp target
	SFHost    string // SourceForge host used in the user@host scp target
	SFProject string // presumably the project name under /home/frs/project/ — not referenced in this file's visible code, verify against callers
}
|
||||
|
||||
// Load the SourceForge configuration from a file
|
||||
func LoadConfig() (*Config, error) {
|
||||
file, err := os.Open("sourceforge_config.json") // Assuming a JSON config file
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer file.Close()
|
||||
|
||||
config := &Config{}
|
||||
if err := json.NewDecoder(file).Decode(config); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return config, nil
|
||||
}
|
||||
|
||||
// CompressDirectory compresses the build directory to a tar.gz file using PAX format for large file support
|
||||
func CompressDirectory(srcDir, dstFile string) error {
|
||||
// Create the destination file
|
||||
f, err := os.Create(dstFile)
|
||||
if err != nil {
|
||||
return fmt.Errorf("could not create file %s: %v", dstFile, err)
|
||||
}
|
||||
defer f.Close()
|
||||
|
||||
// Create a new gzip writer
|
||||
gw := gzip.NewWriter(f)
|
||||
defer gw.Close()
|
||||
|
||||
// Create a new tar writer with PAX format for large file support
|
||||
tw := tar.NewWriter(gw)
|
||||
defer tw.Close()
|
||||
|
||||
// Walk through the source directory and add files to the tar archive
|
||||
err = filepath.Walk(srcDir, func(file string, fi os.FileInfo, err error) error {
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// Create tar header using PAX format
|
||||
header, err := tar.FileInfoHeader(fi, "")
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// Set the correct header name, preserving the relative directory structure
|
||||
relPath, err := filepath.Rel(srcDir, file)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
header.Name = relPath
|
||||
|
||||
// Explicitly set the type flag for directories
|
||||
if fi.IsDir() {
|
||||
header.Typeflag = tar.TypeDir
|
||||
} else if fi.Mode()&os.ModeSymlink != 0 {
|
||||
// Handle symlinks
|
||||
linkTarget, err := os.Readlink(file)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
header.Linkname = linkTarget
|
||||
}
|
||||
|
||||
// Write the header to the tarball
|
||||
if err := tw.WriteHeader(header); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// If it's a directory or symlink, skip writing its contents
|
||||
if fi.IsDir() || fi.Mode()&os.ModeSymlink != 0 {
|
||||
return nil
|
||||
}
|
||||
|
||||
// Open the file for reading
|
||||
f, err := os.Open(file)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer f.Close()
|
||||
|
||||
// Copy the file content to the tar writer
|
||||
if _, err := io.Copy(tw, f); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
})
|
||||
|
||||
if err != nil {
|
||||
return fmt.Errorf("error walking the source directory %s: %v", srcDir, err)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// Upload the file to SourceForge, ensuring the local directory structure is created and uploaded
|
||||
func Upload(config *Config, buildPath, remoteDir string) error {
|
||||
// Generate a random hash for the temp directory name
|
||||
randomHash, err := generateRandomHash(8) // 8 bytes = 16 hex characters
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to generate random hash: %v", err)
|
||||
}
|
||||
|
||||
// Create a temporary directory with the random hash appended
|
||||
tmpDir, err := os.MkdirTemp("", "spitfire-upload-"+randomHash)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to create temporary directory: %v", err)
|
||||
}
|
||||
|
||||
// Create the required local directory structure inside the temporary directory
|
||||
localDir := filepath.Join(tmpDir, remoteDir)
|
||||
err = os.MkdirAll(localDir, os.ModePerm)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to create local directory structure: %v", err)
|
||||
}
|
||||
|
||||
// Move the build file to the local directory structure
|
||||
destinationFile := filepath.Join(localDir, filepath.Base(buildPath))
|
||||
err = copyFile(buildPath, destinationFile)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to copy file to local directory structure: %v", err)
|
||||
}
|
||||
|
||||
// Upload the entire local directory structure to the remote directory
|
||||
fmt.Printf("Uploading file %s to %s on SourceForge...\n", buildPath, remoteDir)
|
||||
scpCmd := exec.Command("scp", "-i", config.SFKeyPath, "-r", tmpDir+"/.", fmt.Sprintf("%s@%s:%s", config.SFUser, config.SFHost, "/"))
|
||||
scpCmd.Stdout = os.Stdout
|
||||
scpCmd.Stderr = os.Stderr
|
||||
return scpCmd.Run()
|
||||
}
|
||||
|
||||
// Helper function to generate a random hash
|
||||
func generateRandomHash(length int) (string, error) {
|
||||
bytes := make([]byte, length)
|
||||
_, err := rand.Read(bytes)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
return hex.EncodeToString(bytes), nil
|
||||
}
|
||||
|
||||
// Helper function to copy a file from src to dst
|
||||
func copyFile(src, dst string) error {
|
||||
sourceFile, err := os.Open(src)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer sourceFile.Close()
|
||||
|
||||
destFile, err := os.Create(dst)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer destFile.Close()
|
||||
|
||||
_, err = io.Copy(destFile, sourceFile)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return destFile.Sync() // Ensure all writes to the file are flushed
|
||||
}
|
||||
|
||||
// Download the APPINDEX file from SourceForge
|
||||
func DownloadAPPINDEX(config *Config, remoteDir string) error {
|
||||
fmt.Println("Downloading APPINDEX from SourceForge...")
|
||||
|
||||
// Construct the correct path without double slashes
|
||||
remoteAPPINDEXPath := filepath.Join(remoteDir, "APPINDEX")
|
||||
|
||||
// Run the SCP command to download the APPINDEX file
|
||||
cmd := exec.Command("scp", "-i", config.SFKeyPath, fmt.Sprintf("%s@%s:%s", config.SFUser, config.SFHost, remoteAPPINDEXPath), "./APPINDEX")
|
||||
cmd.Stdout = os.Stdout
|
||||
cmd.Stderr = os.Stderr
|
||||
|
||||
err := cmd.Run()
|
||||
if err != nil {
|
||||
// Check if the error is due to the file not existing
|
||||
if strings.Contains(err.Error(), "No such file or directory") {
|
||||
fmt.Println("APPINDEX file not found on the server. A new one will be created.")
|
||||
return nil // Continue without failing if the APPINDEX is missing
|
||||
}
|
||||
return fmt.Errorf("failed to download APPINDEX: %v", err) // Fail for other types of errors
|
||||
}
|
||||
|
||||
fmt.Println("APPINDEX downloaded successfully.")
|
||||
return nil
|
||||
}
|
||||
|
||||
// Upload the updated APPINDEX file to SourceForge
|
||||
func UploadAPPINDEX(config *Config, remoteDir string) error {
|
||||
fmt.Println("Uploading updated APPINDEX to SourceForge...")
|
||||
cmd := exec.Command("scp", "-i", config.SFKeyPath, "./APPINDEX", fmt.Sprintf("%s@%s:%s", config.SFUser, config.SFHost, remoteDir))
|
||||
cmd.Stdout = os.Stdout
|
||||
cmd.Stderr = os.Stderr
|
||||
return cmd.Run()
|
||||
}
|
||||
|
||||
// GetDirectorySize calculates the total size of all files in a directory
|
||||
func GetDirectorySize(path string) (int64, error) {
|
||||
var size int64
|
||||
err := filepath.Walk(path, func(_ string, info os.FileInfo, err error) error {
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if !info.IsDir() {
|
||||
size += info.Size()
|
||||
}
|
||||
return nil
|
||||
})
|
||||
return size, err
|
||||
}
|
||||
|
||||
// GetFileSize returns the size of a file in bytes
|
||||
func GetFileSize(filePath string) (int64, error) {
|
||||
fileInfo, err := os.Stat(filePath)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
return fileInfo.Size(), nil
|
||||
}
|
154
upload.sh
Executable file
154
upload.sh
Executable file
|
@ -0,0 +1,154 @@
|
|||
#!/bin/bash
|
||||
|
||||
# Load SourceForge configuration
|
||||
source sourceforge_config.sh
|
||||
|
||||
# Print the usage text and exit successfully. A quoted heredoc replaces the
# long echo chain; the emitted text is unchanged.
print_help() {
  cat <<'EOF'
Usage: ./upload.sh -p=<path-to-build> -t=<target> [-c|--compress] [-v|--version=<version>]

Options:
 -p, --path : Path to the build directory
 -t, --target : Target location format: component-arch-release-platform
 -c, --compress : Compress the build directory into a tar.gz file before uploading
 -v, --version : Specify version for the package. For nightly, use current date if not specified. For stable, increment version if not specified.
 -h, --help : Display this help message

Example use:
 # Without compression
 ./upload.sh -p=./mozilla-central/obj-x86_64-pc-linux-gnu/dist/bin -t=browser-x86_64-stable-linux -v=1.0

 # With compression
 ./upload.sh -p=./mozilla-central/obj-x86_64-pc-linux-gnu/dist/bin -t=browser-x86_64-stable-linux -c -v=1.0

 # Nightly build without specifying version
 ./upload.sh -p=./mozilla-central/obj-x86_64-pc-linux-gnu/dist/bin -t=browser-x86_64-nightly-linux -c
EOF
  exit 0
}
|
||||
|
||||
# Defaults: no compression; version is resolved later from the release type.
COMPRESS=false
VERSION=""

# Parse command line arguments. Each option consumes exactly one word, so
# the shift is hoisted after the case; -h and invalid options never reach it
# because print_help exits.
while [[ $# -gt 0 ]]; do
  case "$1" in
    -p=*|--path=*)    BUILD_PATH="${1#*=}" ;;
    -t=*|--target=*)  TARGET="${1#*=}" ;;
    -v=*|--version=*) VERSION="${1#*=}" ;;
    -c|--compress)    COMPRESS=true ;;
    -h|--help)        print_help ;;
    *)
      echo "Invalid option: $1"
      print_help
      ;;
  esac
  shift
done
|
||||
|
||||
# Check if both required arguments are provided
if [ -z "$BUILD_PATH" ] || [ -z "$TARGET" ]; then
  echo "Error: Both path and target must be specified."
  print_help
fi

# Split the target into its components (component-arch-release-platform)
IFS='-' read -r COMPONENT ARCH RELEASE PLATFORM <<< "$TARGET"

# Handle versioning:
#   nightly -> today's date unless -v was given
#   stable  -> bump the minor of the recorded version, or start at 1.0
if [ -z "$VERSION" ]; then
  if [[ "$RELEASE" == "nightly" ]]; then
    VERSION=$(date +"%Y-%m-%d")
  elif [[ "$RELEASE" == "stable" ]]; then
    # BUG FIX: the previous grep -oP lookbehind is GNU-only and broke on any
    # formatting change in packages.json; it also ran against a possibly
    # missing file. jq is already a hard dependency of this script.
    CURRENT_VERSION=""
    if [ -f packages.json ]; then
      CURRENT_VERSION=$(jq -r --arg comp "$COMPONENT" '.[$comp] // empty' packages.json)
    fi
    if [ -n "$CURRENT_VERSION" ]; then
      MAJOR_VERSION=$(echo "$CURRENT_VERSION" | cut -d. -f1)
      MINOR_VERSION=$(echo "$CURRENT_VERSION" | cut -d. -f2)
      MINOR_VERSION=$((MINOR_VERSION + 1))
      VERSION="${MAJOR_VERSION}.${MINOR_VERSION}"
    else
      VERSION="1.0"
    fi
  fi
fi
||||
|
||||
# Record this component's version in packages.json, creating the file first
# if it does not exist yet.
[ -f packages.json ] || echo "{}" > packages.json

jq --arg comp "$COMPONENT" --arg ver "$VERSION" '.[$comp] = $ver' packages.json > packages_temp.json && mv packages_temp.json packages.json

# Determine the upload directory based on the target and version:
# a "current" build goes to .../latest, anything else to .../<version>.
case "$RELEASE" in
  nightly)
    if [[ "$VERSION" == $(date +"%Y-%m-%d") ]]; then
      UPLOAD_DIR="$COMPONENT/$ARCH/$RELEASE/latest"
    else
      UPLOAD_DIR="$COMPONENT/$ARCH/$RELEASE/$VERSION"
    fi
    ;;
  stable)
    if [[ "$VERSION" =~ ^[0-9]+\.[0-9]+$ ]]; then
      UPLOAD_DIR="$COMPONENT/$ARCH/$RELEASE/$VERSION"
    else
      UPLOAD_DIR="$COMPONENT/$ARCH/$RELEASE/latest"
    fi
    ;;
esac
||||
|
||||
# Remote path layout on SourceForge: /home/frs/project/<project>/<upload dir>
REMOTE_DIR="/home/frs/project/${SF_PROJECT}/${UPLOAD_DIR}"

# When -c/--compress was requested, upload a tarball of the build tree
# instead of the raw path given on the command line.
case "$COMPRESS" in
  true)
    COMPRESSED_FILE="/tmp/${TARGET}.tar.gz"
    echo "Compressing $BUILD_PATH into $COMPRESSED_FILE..."
    tar -czf "$COMPRESSED_FILE" -C "$BUILD_PATH" .
    BUILD_PATH="$COMPRESSED_FILE"
    ;;
esac
|
||||
|
||||
# Remove the temporary tarball (when one was created). BUG FIX: previously
# this only happened on the success path, so every failed upload left a
# stale build archive in /tmp.
cleanup_compressed() {
  if [ "$COMPRESS" = true ] && [ -f "$COMPRESSED_FILE" ]; then
    rm "$COMPRESSED_FILE"
  fi
}

# Upload the files to SourceForge. The first scp is allowed to fail quietly:
# it only fails when the remote directory hierarchy does not exist yet.
echo "Uploading files from $BUILD_PATH to $REMOTE_DIR on SourceForge..."
scp -i "$SF_KEY_PATH" "$BUILD_PATH" "$SF_USER@$SF_HOST:$REMOTE_DIR/" 2>/dev/null
UPLOAD_STATUS=$?

if [ $UPLOAD_STATUS -ne 0 ]; then
  echo "Failed to upload files directly. Creating local directory structure and uploading..."

  # Recreate the directory hierarchy locally, then rsync it so the remote
  # directories get created.
  TEMP_DIR=$(mktemp -d)
  mkdir -p "$TEMP_DIR/$UPLOAD_DIR"

  rsync -av --omit-dir-times --no-perms -e "ssh -i $SF_KEY_PATH" "$TEMP_DIR/" "$SF_USER@$SF_HOST:/home/frs/project/$SF_PROJECT/" || { echo "Failed to upload directory structure. Exiting."; rm -rf "$TEMP_DIR"; cleanup_compressed; exit 1; }

  # Clean up the temporary directory
  rm -rf "$TEMP_DIR"

  # Retry uploading the files now that the remote directories exist
  scp -i "$SF_KEY_PATH" "$BUILD_PATH" "$SF_USER@$SF_HOST:$REMOTE_DIR/" || { echo "Failed to upload files after creating directory structure. Exiting."; cleanup_compressed; exit 1; }
fi

# Upload the updated packages.json to the project root directory
echo "Uploading packages.json to the root directory on SourceForge..."
scp -i "$SF_KEY_PATH" packages.json "$SF_USER@$SF_HOST:/home/frs/project/$SF_PROJECT/" || { echo "Failed to upload packages.json. Exiting."; cleanup_compressed; exit 1; }

echo "Upload completed successfully."

cleanup_compressed

exit 0
|
Loading…
Reference in a new issue