diff --git a/README.md b/README.md index f474694..9f64c57 100644 --- a/README.md +++ b/README.md @@ -2,6 +2,8 @@ Please keep in mind that this project is WIP. +Trello board: [Click me!](https://trello.com/b/6zWLE6Jm) + ## What can it do? - Backup you stuff via a dynamic configuration - Log the Backup progress to a database diff --git a/StorageTypes/AzureFileStorage.go b/StorageTypes/AzureFileStorage.go index 73ba9b3..142be30 100644 --- a/StorageTypes/AzureFileStorage.go +++ b/StorageTypes/AzureFileStorage.go @@ -5,8 +5,8 @@ import ( "encoding/json" "fmt" "github.com/Azure/azure-storage-file-go/azfile" - "github.com/google/uuid" "github.com/cheggaaa/pb/v3" + "github.com/google/uuid" "net/url" "os" "path/filepath" @@ -24,7 +24,7 @@ type AzureFileStorage struct{ } -func (azure AzureFileStorage) upload(fileName string, backupName string){ +func (azure AzureFileStorage) upload(fileName string, backupName string, destinationPath string){ logger := Logging.DetailedLogger("AzureFileStorage", "upload") file, err := os.Open(fileName) @@ -43,6 +43,10 @@ func (azure AzureFileStorage) upload(fileName string, backupName string){ logger.Fatal(err) } + if destinationPath != ""{ + azure.TargetDirectory = destinationPath + } + u, _ := url.Parse(fmt.Sprintf("https://%s.file.core.windows.net/%s/%s/%s", azure.StorageAccountName, azure.FileshareName ,azure.TargetDirectory, filepath.Base(fileName))) fileURL := azfile.NewFileURL(*u, azfile.NewPipeline(credential, azfile.PipelineOptions{})) diff --git a/StorageTypes/StorageInterface.go b/StorageTypes/StorageInterface.go index 43e243f..3de8d9a 100644 --- a/StorageTypes/StorageInterface.go +++ b/StorageTypes/StorageInterface.go @@ -3,24 +3,26 @@ package StorageTypes import "scabiosa/SQL" type Storage interface { - upload(fileName string, backupName string) + upload(fileName string, backupName string, destinationPath string) } -func UploadFile(storage Storage, fileName string, backupName string){ - storage.upload(fileName, backupName) +func 
UploadFile(storage Storage, fileName string, backupName string, destinationPath string){ + storage.upload(fileName, backupName, destinationPath) } func CheckStorageType(storageType string) Storage{ - if storageType == "azure-fileshare"{ + if storageType == "azure-fileshare" { return GetAzureStorage() } + return nil } func CheckRemoteStorageType(storageType string) SQL.RemoteStorageType { - if storageType == "azure-fileshare"{ + if storageType == "azure-fileshare" { return SQL.REMOTE_AZURE_FILE } - return 3 + + return SQL.REMOTE_NONE } \ No newline at end of file diff --git a/Tools/Config.go b/Tools/Config.go index 49fe55e..49642d2 100644 --- a/Tools/Config.go +++ b/Tools/Config.go @@ -20,16 +20,11 @@ type Config struct { FolderToBackup []struct{ BackupName string `json:"backupName"` FolderPath string `json:"folderPath"` - StorageType string `json:"storageType"` + RemoteStorageType string `json:"remoteStorageType"` + TargetPath string `json:"targetPath"` CreateLocalBackup bool `json:"createLocalBackup"` } `json:"foldersToBackup"` } -type Backup struct{ - backupName string - folderPath string - storageType string - createLocalBackup bool -} func readConfig() []byte { logger := Logging.DetailedLogger("ConfigHandler", "readConfig") diff --git a/config/config.json b/config/config.json index b2368c1..21652af 100644 --- a/config/config.json +++ b/config/config.json @@ -13,6 +13,7 @@ "backupName": "", "folderPath": "", - "storageType": "", + "remoteStorageType": "", + "targetPath": "", "createLocalBackup": false } ] diff --git a/main.go b/main.go index e7dc98c..0d7157e 100644 --- a/main.go +++ b/main.go @@ -17,17 +17,28 @@ func main(){ SQL.CreateDefaultTables(SQL.GetSQLInstance()) for _, backupItem := range config.FolderToBackup{ - storage := StorageTypes.CheckStorageType(backupItem.StorageType) - destPath := checkTmpPath(config, backupItem.CreateLocalBackup) + + var storage StorageTypes.Storage + var destPath string + + if backupItem.RemoteStorageType != "none"{ + storage = 
StorageTypes.CheckStorageType(backupItem.RemoteStorageType) + destPath = checkTmpPath(backupItem.CreateLocalBackup, backupItem.TargetPath) + } else { + destPath = backupItem.TargetPath + } bakFile := Compressor.CreateBakFile(backupItem.BackupName + getTimeSuffix(), backupItem.FolderPath, destPath, backupItem.BackupName) - StorageTypes.UploadFile(storage, bakFile, backupItem.BackupName) - if !backupItem.CreateLocalBackup { - _ = os.Remove(bakFile) - SQL.NewLogEntry(SQL.GetSQLInstance(), uuid.New(), SQL.LogInfo, backupItem.BackupName, SQL.SQLStage_DeleteTmp, SQL.REMOTE_NONE, "Deleted tmp file" ,time.Now()) + if backupItem.RemoteStorageType != "none"{ + StorageTypes.UploadFile(storage, bakFile, backupItem.BackupName, backupItem.TargetPath) } - SQL.NewBackupEntry(SQL.GetSQLInstance(), backupItem.BackupName, time.Now(), backupItem.CreateLocalBackup, backupItem.FolderPath, StorageTypes.CheckRemoteStorageType(backupItem.StorageType), StorageTypes.GetAzureStorage().TargetDirectory) + + if !backupItem.CreateLocalBackup && backupItem.RemoteStorageType != "none"{ + _ = os.Remove(bakFile) + SQL.NewLogEntry(SQL.GetSQLInstance(), uuid.New(), SQL.LogInfo, backupItem.BackupName, SQL.SQLStage_DeleteTmp, SQL.REMOTE_NONE, "Deleted tmp file" ,time.Now()) + } + SQL.NewBackupEntry(SQL.GetSQLInstance(), backupItem.BackupName, time.Now(), backupItem.CreateLocalBackup, backupItem.FolderPath, StorageTypes.CheckRemoteStorageType(backupItem.RemoteStorageType), StorageTypes.GetAzureStorage().TargetDirectory) } } @@ -39,9 +50,9 @@ func getTimeSuffix() string{ return "_" + currTime.Format("02-01-2006_15-04") } -func checkTmpPath(config Tools.Config, createLocalBackup bool) string{ +func checkTmpPath(createLocalBackup bool, targetPath string) string{ logger := Logging.DetailedLogger("mainThread", "checkTmpPath") - if !createLocalBackup{ + if !createLocalBackup { if _, err := os.Stat("tmp"); os.IsNotExist(err) { dirErr := os.Mkdir("tmp", 0775) if dirErr != nil { @@ -51,5 +62,5 @@ func 
checkTmpPath(config Tools.Config, createLocalBackup bool) string{ return "tmp" } - return config.LocalBackupPath + return targetPath }