Merge pull request #15 from netbenix/develop

Implemented Local-only save
This commit is contained in:
netbenix 2021-12-23 09:26:04 +01:00 committed by GitHub
commit 7cbbf61633
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
6 changed files with 40 additions and 25 deletions

View file

@@ -2,6 +2,8 @@
Please keep in mind that this project is WIP.
Trello board: [Click me!](https://trello.com/b/6zWLE6Jm)
## What can it do?
- Back up your stuff via a dynamic configuration
- Log the Backup progress to a database

View file

@@ -5,8 +5,8 @@ import (
"encoding/json"
"fmt"
"github.com/Azure/azure-storage-file-go/azfile"
"github.com/google/uuid"
"github.com/cheggaaa/pb/v3"
"github.com/google/uuid"
"net/url"
"os"
"path/filepath"
@@ -24,7 +24,7 @@ type AzureFileStorage struct{
}
func (azure AzureFileStorage) upload(fileName string, backupName string){
func (azure AzureFileStorage) upload(fileName string, backupName string, destinationPath string){
logger := Logging.DetailedLogger("AzureFileStorage", "upload")
file, err := os.Open(fileName)
@@ -43,6 +43,10 @@ func (azure AzureFileStorage) upload(fileName string, backupName string){
logger.Fatal(err)
}
if destinationPath != ""{
azure.TargetDirectory = destinationPath
}
u, _ := url.Parse(fmt.Sprintf("https://%s.file.core.windows.net/%s/%s/%s", azure.StorageAccountName, azure.FileshareName ,azure.TargetDirectory, filepath.Base(fileName)))
fileURL := azfile.NewFileURL(*u, azfile.NewPipeline(credential, azfile.PipelineOptions{}))

View file

@@ -3,11 +3,11 @@ package StorageTypes
import "scabiosa/SQL"
type Storage interface {
upload(fileName string, backupName string)
upload(fileName string, backupName string, destinationPath string)
}
func UploadFile(storage Storage, fileName string, backupName string){
storage.upload(fileName, backupName)
func UploadFile(storage Storage, fileName string, backupName string, destinationPath string){
storage.upload(fileName, backupName, destinationPath)
}
func CheckStorageType(storageType string) Storage{
@@ -15,6 +15,7 @@ func CheckStorageType(storageType string) Storage{
if storageType == "azure-fileshare" {
return GetAzureStorage()
}
return nil
}
@@ -22,5 +23,6 @@ func CheckRemoteStorageType(storageType string) SQL.RemoteStorageType {
if storageType == "azure-fileshare" {
return SQL.REMOTE_AZURE_FILE
}
return 3
return SQL.REMOTE_NONE
}

View file

@@ -20,16 +20,11 @@ type Config struct {
FolderToBackup []struct{
BackupName string `json:"backupName"`
FolderPath string `json:"folderPath"`
StorageType string `json:"storageType"`
RemoteStorageType string `json:"remoteStorageType"`
TargetPath string `json:"targetPath"`
CreateLocalBackup bool `json:"createLocalBackup"`
} `json:"foldersToBackup"`
}
type Backup struct{
backupName string
folderPath string
storageType string
createLocalBackup bool
}
func readConfig() []byte {
logger := Logging.DetailedLogger("ConfigHandler", "readConfig")

View file

@@ -13,6 +13,7 @@
"backupName": "",
"folderPath": "",
"storageType": "",
"targetPath": "",
"createLocalBackup": false
}
]

25
main.go
View file

@@ -17,17 +17,28 @@ func main(){
SQL.CreateDefaultTables(SQL.GetSQLInstance())
for _, backupItem := range config.FolderToBackup{
storage := StorageTypes.CheckStorageType(backupItem.StorageType)
destPath := checkTmpPath(config, backupItem.CreateLocalBackup)
var storage StorageTypes.Storage
var destPath string
if backupItem.RemoteStorageType != "none"{
storage = StorageTypes.CheckStorageType(backupItem.RemoteStorageType)
destPath = checkTmpPath(backupItem.CreateLocalBackup, backupItem.TargetPath)
} else {
destPath = backupItem.TargetPath
}
bakFile := Compressor.CreateBakFile(backupItem.BackupName + getTimeSuffix(), backupItem.FolderPath, destPath, backupItem.BackupName)
StorageTypes.UploadFile(storage, bakFile, backupItem.BackupName)
if !backupItem.CreateLocalBackup {
if backupItem.RemoteStorageType != "none"{
StorageTypes.UploadFile(storage, bakFile, backupItem.BackupName, backupItem.TargetPath)
}
if !backupItem.CreateLocalBackup && backupItem.RemoteStorageType != "none"{
_ = os.Remove(bakFile)
SQL.NewLogEntry(SQL.GetSQLInstance(), uuid.New(), SQL.LogInfo, backupItem.BackupName, SQL.SQLStage_DeleteTmp, SQL.REMOTE_NONE, "Deleted tmp file" ,time.Now())
}
SQL.NewBackupEntry(SQL.GetSQLInstance(), backupItem.BackupName, time.Now(), backupItem.CreateLocalBackup, backupItem.FolderPath, StorageTypes.CheckRemoteStorageType(backupItem.StorageType), StorageTypes.GetAzureStorage().TargetDirectory)
SQL.NewBackupEntry(SQL.GetSQLInstance(), backupItem.BackupName, time.Now(), backupItem.CreateLocalBackup, backupItem.FolderPath, StorageTypes.CheckRemoteStorageType(backupItem.RemoteStorageType), StorageTypes.GetAzureStorage().TargetDirectory)
}
}
@@ -39,7 +50,7 @@ func getTimeSuffix() string{
return "_" + currTime.Format("02-01-2006_15-04")
}
func checkTmpPath(config Tools.Config, createLocalBackup bool) string{
func checkTmpPath(createLocalBackup bool, targetPath string) string{
logger := Logging.DetailedLogger("mainThread", "checkTmpPath")
if !createLocalBackup {
if _, err := os.Stat("tmp"); os.IsNotExist(err) {
@@ -51,5 +62,5 @@ func checkTmpPath(config Tools.Config, createLocalBackup bool) string{
return "tmp"
}
return config.LocalBackupPath
return targetPath
}