diec and refactoring

parent a62157e8e5
commit 0d715ccb37
20 changed files with 355 additions and 200 deletions
@@ -36,14 +36,14 @@ func setDefaults() {
    viper.SetDefault("web.cert", "/etc/ssl/certs/ssl-cert-snakeoil.pem")
    viper.SetDefault("web.key", "/etc/ssl/key/ssl-cert-snakeoil.key")
    viper.SetDefault("web.loghttp", true)
    viper.SetDefault("web.maxfilesizemb", 100)
    viper.SetDefault("db.host", "localhost")
    viper.SetDefault("db.port", 5432)
    viper.SetDefault("db.user", "scanfile")
    viper.SetDefault("db.database", "scanfile")
    viper.SetDefault("db.password", "CHANGEME")
    viper.SetDefault("db.debug", false)
    viper.SetDefault("store.path", "./storage/files/")
}

func SaveConfig() error {
@@ -13,37 +13,36 @@ import (

// CreateFile() creates the filesystem object and the DB entry for a file
func CreateFile(ctx context.Context, name string, fileBytes []byte) (sqlc.File, error) {
    queries := sqlc.New(pool)
    file := sqlc.File{
        Name: name,
    }
    var err error

    bl2hash := blake2b.Sum256(fileBytes)
    file.Blake2 = bl2hash[:]
    file.Size = int64(len(fileBytes))

    file.Mimetype, _ = store.GetBytesFileType(fileBytes[:262])

    file, err = queries.CreateFile(ctx, sqlc.CreateFileParams{})
    if err == pgx.ErrNoRows {
        slog.Info("File already exists", "file-uuid", file.ID.String())
        return file, nil
    }
    if err != nil {
        slog.Error("Error saving file to database", "error", err, "file-name", name)
        err = nil
    }

    //Using UUIDs instead of the file hash to make switching storage backends easier
    _, err = store.SaveFile(file.ID.String(), fileBytes)
    if err != nil {
        slog.Error("Error saving file to disk", "error", err, "file-uuid", file.ID.String())
        errdel := queries.DeleteFile(ctx, file.ID)
        if errdel != nil {
            slog.Error("Error deleting file from database", "error", errdel, "file-uuid", file.ID.String())
        }
        return file, err
    }
@@ -11,7 +11,6 @@ import (
    "github.com/spf13/viper"
)

var pool *pgxpool.Pool

func Connect() (*pgxpool.Pool, error) {
@@ -24,24 +23,24 @@ func Connect() (*pgxpool.Pool, error) {
        Host: fmt.Sprintf("%s:%d", viper.GetString("db.host"), viper.GetInt("db.port")),
        Path: fmt.Sprintf("/%s", viper.GetString("db.database")),
    }
    conn, err := pgxpool.New(context.Background(), dburl.String())
    if err != nil {
        slog.Error("Could not connect to DB", "url", dburl.Redacted(), "error", err)
        return nil, err
    }

    pool = conn

    return conn, nil
}

func Ping() error {
    err := pool.Ping(context.Background())
    if err != nil {
        slog.Error("Error connecting to DB", "error", err)
        return err
    } else {
        slog.Info("DB: Pong")
        return nil
    }
}
@@ -9,22 +9,39 @@ import (
)

func GetFileByID(fileID string) (sqlc.File, error) {
    var pgUUID pgtype.UUID
    err := pgUUID.Scan(fileID)
    if err != nil {
        slog.Error("Unable to convert string to UUID", "file-uuid", fileID, "error", err)
    }
    query := sqlc.New(pool)
    file, err := query.GetFileByUUID(context.Background(), pgUUID)

    return file, nil
}

func InsertFileProperties(properties sqlc.InsertFilePropertiesParams) error {
    query := sqlc.New(pool)
    err := query.InsertFileProperties(context.Background(), properties)
    if err != nil {
        slog.Error("Unable to add file properties", "file-uuid", properties.ID.String(), "error", err)
    }
    return err
}

// InsertJsonResult() into one of the following tables:
// diec, msoffice_mraptor, msoffice_oleid, msoffice_olevba
func InsertJsonResult(fileID pgtype.UUID, data []byte, table string) error {
    query := sqlc.New(pool)
    var err error
    switch table {
    case "diec":
        err = query.InsertFileDIEC(context.Background(), sqlc.InsertFileDIECParams{FileID: fileID, Data: data})
    case "msoffice_oleid":

    }
    if err != nil {
        slog.Error("Unable to insert DIEC results", "file-uuid", fileID.String(), "error", err)
    }
    return err
}
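The switch above only covers the "diec" table so far; the "msoffice_oleid" case is still empty. A minimal sketch, not part of the commit, of how the remaining tables named in the comment could be routed to the generated sqlc methods added elsewhere in this diff:

    // Sketch: each table name maps to its generated sqlc insert method.
    switch table {
    case "diec":
        err = query.InsertFileDIEC(context.Background(), sqlc.InsertFileDIECParams{FileID: fileID, Data: data})
    case "msoffice_oleid":
        err = query.InsertFileMsofficeOleid(context.Background(), sqlc.InsertFileMsofficeOleidParams{FileID: fileID, Data: data})
    case "msoffice_olevba":
        err = query.InsertFileMsofficeOlevba(context.Background(), sqlc.InsertFileMsofficeOlevbaParams{FileID: fileID, Data: data})
    case "msoffice_mraptor":
        err = query.InsertFileMsofficeMraptor(context.Background(), sqlc.InsertFileMsofficeMraptorParams{FileID: fileID, Data: data})
    }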
@@ -10,63 +10,63 @@ import (

// NewProcessingJob() Creates a new Processing Job in the Database
func NewProcessingJob(ctx context.Context, fileid pgtype.UUID, jobType string) (sqlc.ProcessingJob, error) {
    job := sqlc.ProcessingJob{}
    job.FileID = fileid
    query := sqlc.New(pool)
    job, err := query.CreateProcessingJob(ctx, fileid)

    if err != nil {
        slog.Error("Unable to create new processing job", "file-uuid", fileid.String())
        return job, err
    }

    return job, nil
}

// StartProcessingJob() starts the job
func StartProcessingJob(jobid int64) error {
    query := sqlc.New(pool)
    err := query.StartProcessingJob(context.Background(), jobid)
    if err != nil {
        slog.Error("Unable to start processing job", "job-id", jobid)
    }
    return err
}

// FinishProcessingJob() marks the job as completed
func FinishProcessingJob(jobid int64) error {
    query := sqlc.New(pool)
    err := query.FinishProcessingJob(context.Background(), jobid)
    if err != nil {
        slog.Error("Unable to finish processing job", "job-id", jobid)
    }
    return err
}

// FailProcessingJob() marks the job as failed
func FailProcessingJob(jobid int64, jobErr error) error {
    slog.Error("Job failed", "job-id", jobid, "error", jobErr)
    query := sqlc.New(pool)
    var params sqlc.FailProcessingJobParams
    params.ID = jobid
    params.Error.String = jobErr.Error()
    err := query.FailProcessingJob(context.Background(), params)
    if err != nil {
        slog.Error("Unable to mark processing job as failed", "job-id", jobid, "error", err)
    }
    return err
}

func AddProcessingJobMessage(jobid int64, message string) error {
    _, err := pool.Exec(context.Background(),
        `
        UPDATE processing_jobs
        SET messages = messages || $2::JSONB
        WHERE id = $1;
        `,
        jobid, message)
    if err != nil {
        slog.Error("Unable to finish processing job", "job-id", jobid)
    }
    return err
}
@@ -3,4 +3,27 @@ INSERT INTO file_properties (
  id, sha256, md5, libmagic_mime, libmagic_extension, libmagic_apple
) VALUES ($1, $2, $3, $4, $5, $6);

-- name: GetFileProperties :one
SELECT * FROM file_properties
WHERE id = $1;

-- name: InsertFileDIEC :exec
INSERT INTO diec (
  file_id, data
) VALUES ($1, $2);

-- name: InsertFileMsofficeOleid :exec
INSERT INTO msoffice_oleid (
  file_id, data
) VALUES ($1, $2);

-- name: InsertFileMsofficeOlevba :exec
INSERT INTO msoffice_olevba (
  file_id, data
) VALUES ($1, $2);

-- name: InsertFileMsofficeMraptor :exec
INSERT INTO msoffice_mraptor (
  file_id, data
) VALUES ($1, $2);
@@ -10,31 +10,34 @@ import (
    "git.jmbit.de/jmb/scanfile/server/internal/store"
)

// BasicProcessing() determines type agnostic information about the file
func BasicProcessing(job sqlc.ProcessingJob) error {
    fileBytes, err := store.GetFileBytes(job.FileID.String())
    if err != nil {
        database.FailProcessingJob(job.ID, err)
        return err
    }
    sha256sum := sha256.Sum256(fileBytes)
    md5sum := md5.Sum(fileBytes)
    fileCmdResult, err := FileCmd(job.FileID.String())
    if err != nil {
+       database.FailProcessingJob(job.ID, err)
        slog.Error("Error processing file", "file-uuid", job.FileID.String(), "error", err)
        return err
    }

    fileProperties := sqlc.InsertFilePropertiesParams{}
    fileProperties.ID.Bytes = job.FileID.Bytes
    fileProperties.Md5 = md5sum[:]
    fileProperties.Sha256 = sha256sum[:]
    fileProperties.LibmagicMime.String = fileCmdResult.MimeType
    fileProperties.LibmagicApple.String = fileCmdResult.Apple
    fileProperties.LibmagicExtension.String = fileCmdResult.Extension
-   database.InsertFileProperties(fileProperties)
+   err = database.InsertFileProperties(fileProperties)
+   if err != nil {
+       slog.Error("Error inserting basic file properties into database", "file-uuid", job.FileID.String(), "error", err)
+       return err
+   }

    return nil
}
server/internal/processing/basic/diec.go (new file, 25 lines)
@@ -0,0 +1,25 @@
package basic

import (
    "log/slog"
    "os/exec"

    "git.jmbit.de/jmb/scanfile/server/internal/store"
)

// DiecScan() runs diec -jdu on the file
func DiecScan(fileName string) ([]byte, error) {
    var by []byte
    filepath, err := store.AbsPath(fileName)
    if err != nil {
        slog.Error("Error in DiecScan", "file-uuid", fileName, "error", err)
        return by, err
    }
    cmd := exec.Command("/usr/bin/diec", "-jdu", filepath)
    result, err := cmd.Output()
    if err != nil {
        slog.Error("Error in DiecScan", "file-uuid", fileName, "error", err)
        return by, err
    }
    return result, nil
}
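A brief sketch of how DiecScan output could be persisted through the InsertJsonResult helper added in this commit, assuming the caller holds a processing job for the stored file (this wiring itself is not part of the commit):

    // Sketch: run DIE on the stored file and record the JSON result.
    data, err := basic.DiecScan(job.FileID.String())
    if err != nil {
        database.FailProcessingJob(job.ID, err)
        return err
    }
    return database.InsertJsonResult(job.FileID, data, "diec")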
@@ -8,7 +8,6 @@ import (
    "git.jmbit.de/jmb/scanfile/server/internal/store"
)

type FileCmdResult struct {
    Type string
    MimeType string
@@ -16,38 +15,39 @@ type FileCmdResult struct {
    Extension string
}

// FileCmd() runs "/usr/bin/file" on the object. Should be replaced with libmagic bindings instead
func FileCmd(fileName string) (FileCmdResult, error) {
    var returnStruct FileCmdResult
    filepath, err := store.AbsPath(fileName)
    if err != nil {
        slog.Error("Error in FileCmd", "file-uuid", fileName, "error", err)
        return returnStruct, err
    }
    cmd := exec.Command("/usr/bin/file", "-b", filepath)
    result, err := cmd.Output()
    if err != nil {
        slog.Error("Error running file command", "file-uuid", fileName, "error", err)
        return returnStruct, err
    }
    returnStruct.Type = strings.TrimRight(string(result), "\n ")
    cmd = exec.Command("/usr/bin/file", "-b", "--mime-type", filepath)
    result, err = cmd.Output()
    if err != nil {
        slog.Error("Error running file (mime-type) command", "file-uuid", fileName, "error", err)
        return returnStruct, err
    }
    returnStruct.MimeType = strings.TrimRight(string(result), "\n ")
    cmd = exec.Command("/usr/bin/file", "-b", "--apple", filepath)
    result, err = cmd.Output()
    if err != nil {
        slog.Error("Error running file (apple) command", "file-uuid", fileName, "error", err)
        return returnStruct, err
    }
    returnStruct.Apple = strings.TrimRight(string(result), "\n ")
    cmd = exec.Command("/usr/bin/file", "-b", "--extension", filepath)
    result, err = cmd.Output()
    if err != nil {
        slog.Error("Error running file (extension) command", "file-uuid", fileName, "error", err)
        return returnStruct, err
    }
    returnStruct.Extension = strings.TrimRight(string(result), "\n ")
@@ -14,27 +14,27 @@ import (

var semaphore chan struct{}
var swg *sync.WaitGroup

// Used to determine if a task was started by a previous instance that stalled out or died
var startup time.Time

func Setup(wg *sync.WaitGroup) {
    semaphore = make(chan struct{}, viper.GetInt("processing.maxparallel"))
    startup = time.Now()
}

// Submit() starts the analysis process for a file.
func Submit(ctx context.Context, file pgtype.UUID) error {
    job, err := database.NewProcessingJob(ctx, file, TypeBasic)
    if err != nil {
        slog.Error("Could not submit processing job", "error", err, "file-uuid", file)
        return err
    }
    go processJob(job)

    return nil
}

func processJob(job sqlc.ProcessingJob) {

}
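processJob() is left as an empty stub in this commit. A rough sketch, under the assumption that it should gate concurrency with the semaphore and drive the job helpers defined above (the exact behavior is not specified in the diff):

    // Sketch: bound concurrency with the semaphore, run the basic analysis,
    // and record the job outcome. Only names already present in this diff are used.
    func processJob(job sqlc.ProcessingJob) {
        semaphore <- struct{}{}        // acquire a worker slot
        defer func() { <-semaphore }() // release it when done

        if err := database.StartProcessingJob(job.ID); err != nil {
            return
        }
        if err := basic.BasicProcessing(job); err != nil {
            database.FailProcessingJob(job.ID, err)
            return
        }
        database.FinishProcessingJob(job.ID)
    }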
@@ -5,71 +5,76 @@ import (
    "strings"
)

const TypeBasic = "Basic"

// Microsoft Office Document
const TypeMSOffice = "MSOffice"

// Microsoft Windows Portable Executable
const TypePE = "PE"

// Linux/UNIX Executable Linkable Format
const TypeELF = "ELF"

// Java Archive (JAR)
const TypeJAR = "JAR"

// Archives (compressed etc.)
const TypeArchive = "Archive"

// Anything not implemented (yet)
const TypeOther = "Other"

var MSOfficeMime = []string{
    "application/msword",
    "application/vnd.ms-excel",
    "application/vnd.ms-powerpoint",
    "application/vnd.openxmlformats-officedocument.wordprocessingml.document",
    "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet",
    "application/vnd.openxmlformats-officedocument.presentationml.presentation",
}

var ELFMime = []string{
    "application/x-executable",
}

var PEMime = []string{
    "application/vnd.microsoft.portable-executable",
}

var ArchiveMime = []string{
    "application/epub+zip",
    "application/zip",
    "application/x-tar",
    "application/vnd.rar",
    "application/gzip",
    "application/x-bzip2",
    "application/x-7z-compressed",
    "application/x-xz",
    "application/zstd",
    "application/x-iso9660-image",
    "application/x-google-chrome-extension",
    "application/vnd.ms-cab-compressed",
    "application/vnd.debian.binary-package",
    "application/x-unix-archive",
    "application/x-compress",
    "application/x-lzip",
    "application/x-rpm",
    "application/dicom",
}

func TypeFromMime(mimetype string) string {
    if strings.HasPrefix(mimetype, "application") {
        if slices.Contains(ELFMime, mimetype) {
            return TypeELF
        } else if slices.Contains(PEMime, mimetype) {
            return TypePE
        } else if slices.Contains(MSOfficeMime, mimetype) {
            return TypeMSOffice
        } else if slices.Contains(ArchiveMime, mimetype) {
            return TypeArchive
        }
    }

    return TypeOther
}
@@ -11,7 +11,7 @@ func RegisterRoutes() *http.ServeMux {
    mux.HandleFunc("/", web.IndexWebHandler)
    mux.HandleFunc("/about", web.AboutWebHandler)
    mux.HandleFunc("/files/{uuid}", web.FileViewWebHandler)
    mux.HandleFunc("POST /upload", web.IndexUploadHandler)
    mux.Handle("/assets/", http.FileServer(http.FS(web.Files)))
    return mux
}
@@ -11,6 +11,89 @@ import (
    "github.com/jackc/pgx/v5/pgtype"
)

const getFileProperties = `-- name: GetFileProperties :one
SELECT id, sha256, md5, libmagic_mime, libmagic_extension, libmagic_apple FROM file_properties
WHERE id = $1
`

func (q *Queries) GetFileProperties(ctx context.Context, id pgtype.UUID) (FileProperty, error) {
    row := q.db.QueryRow(ctx, getFileProperties, id)
    var i FileProperty
    err := row.Scan(
        &i.ID,
        &i.Sha256,
        &i.Md5,
        &i.LibmagicMime,
        &i.LibmagicExtension,
        &i.LibmagicApple,
    )
    return i, err
}

const insertFileDIEC = `-- name: InsertFileDIEC :exec
INSERT INTO diec (
  file_id, data
) VALUES ($1, $2)
`

type InsertFileDIECParams struct {
    FileID pgtype.UUID
    Data []byte
}

func (q *Queries) InsertFileDIEC(ctx context.Context, arg InsertFileDIECParams) error {
    _, err := q.db.Exec(ctx, insertFileDIEC, arg.FileID, arg.Data)
    return err
}

const insertFileMsofficeMraptor = `-- name: InsertFileMsofficeMraptor :exec
INSERT INTO msoffice_mraptor (
  file_id, data
) VALUES ($1, $2)
`

type InsertFileMsofficeMraptorParams struct {
    FileID pgtype.UUID
    Data []byte
}

func (q *Queries) InsertFileMsofficeMraptor(ctx context.Context, arg InsertFileMsofficeMraptorParams) error {
    _, err := q.db.Exec(ctx, insertFileMsofficeMraptor, arg.FileID, arg.Data)
    return err
}

const insertFileMsofficeOleid = `-- name: InsertFileMsofficeOleid :exec
INSERT INTO msoffice_oleid (
  file_id, data
) VALUES ($1, $2)
`

type InsertFileMsofficeOleidParams struct {
    FileID pgtype.UUID
    Data []byte
}

func (q *Queries) InsertFileMsofficeOleid(ctx context.Context, arg InsertFileMsofficeOleidParams) error {
    _, err := q.db.Exec(ctx, insertFileMsofficeOleid, arg.FileID, arg.Data)
    return err
}

const insertFileMsofficeOlevba = `-- name: InsertFileMsofficeOlevba :exec
INSERT INTO msoffice_olevba (
  file_id, data
) VALUES ($1, $2)
`

type InsertFileMsofficeOlevbaParams struct {
    FileID pgtype.UUID
    Data []byte
}

func (q *Queries) InsertFileMsofficeOlevba(ctx context.Context, arg InsertFileMsofficeOlevbaParams) error {
    _, err := q.db.Exec(ctx, insertFileMsofficeOlevba, arg.FileID, arg.Data)
    return err
}

const insertFileProperties = `-- name: InsertFileProperties :exec
INSERT INTO file_properties (
  id, sha256, md5, libmagic_mime, libmagic_extension, libmagic_apple
@@ -33,7 +33,7 @@ func GetFileType(fileId string) (string, error) {
    return kind.MIME.Value, nil
}

// Returns the MimeType for a []byte
// We only have to pass the file header = first 261 bytes
func GetBytesFileType(data []byte) (string, error) {
    kind, err := filetype.Match(data)
@@ -33,7 +33,7 @@ func SaveFile(fileName string, fileBytes []byte) (string, error) {
        slog.Error("could not create file on disk,", "error", err, "file-uuid", fileName)
        return "", err
    }
    defer osFile.Close()
    _, err = osFile.Write(fileBytes)
    if err != nil {
        slog.Error("could not write file content,", "error", err, "file-uuid", fileName)
@@ -74,15 +74,15 @@ func AbsPath(fileName string) (string, error) {
}

func GetFileBytes(fileName string) ([]byte, error) {
    var fileBytes []byte
    file, err := OpenFile(fileName)
    if err != nil {
        return fileBytes, err
    }
    _, err = file.Read(fileBytes)
    if err != nil {
        slog.Error("could not read file content,", "error", err, "file-uuid", fileName)
        return fileBytes, err
    }
    return fileBytes, nil
}
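An aside on GetFileBytes: file.Read is called with a zero-length slice, so it reads no bytes. A minimal sketch of a variant using io.ReadAll, assuming OpenFile returns something that satisfies io.ReadCloser and the io package is imported (both assumptions, since neither is shown in this diff):

    // Sketch: io.ReadAll reads until EOF, avoiding the zero-length buffer issue.
    func GetFileBytes(fileName string) ([]byte, error) {
        file, err := OpenFile(fileName)
        if err != nil {
            return nil, err
        }
        defer file.Close()
        fileBytes, err := io.ReadAll(file)
        if err != nil {
            slog.Error("could not read file content,", "error", err, "file-uuid", fileName)
            return nil, err
        }
        return fileBytes, nil
    }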
@@ -34,16 +34,16 @@ func main() {
    log.SetOutput(os.Stderr)
    slog.SetDefault(slog.New(slog.NewTextHandler(os.Stderr, nil)))
    config.ReadConfigFile("")
    var wg sync.WaitGroup
    database.Connect()
    database.Ping()
    store.SetupStore()
    processing.Setup(&wg)
    if viper.GetBool("web.tls") {
        slog.Info("Starting HTTPS server")
        server.NewServer().ListenAndServeTLS(viper.GetString("web.cert"), viper.GetString("web.key"))
    } else {
        slog.Info("Starting HTTP server")
        server.NewServer().ListenAndServe()
    }
}
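main() discards the errors returned by database.Connect() and database.Ping(). A minimal fail-fast sketch, an assumption about the intended behavior rather than part of the commit:

    // Sketch: abort startup if the database is unreachable.
    if _, err := database.Connect(); err != nil {
        slog.Error("could not connect to database", "error", err)
        os.Exit(1)
    }
    if err := database.Ping(); err != nil {
        os.Exit(1)
    }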
@@ -8,7 +8,7 @@ import (
)

func FileViewWebHandler(w http.ResponseWriter, r *http.Request) {
    file, err := database.FileByUUID(r.PathValue("uuid"))
    if err != nil {
        slog.Error("Error getting File in FileViewWebHandler", "error", err, "file-uuid", r.PathValue("uuid"))
        http.Error(w, err.Error(), http.StatusBadRequest)
@@ -15,7 +15,7 @@ func IndexWebHandler(w http.ResponseWriter, r *http.Request) {
    if err != nil {
        slog.Error("Error rendering in IndexWebHandler", "error", err)
        http.Error(w, err.Error(), http.StatusBadRequest)
        return
    }
}
@@ -33,27 +33,27 @@ func IndexUploadHandler(w http.ResponseWriter, r *http.Request) {
    if err != nil {
        slog.Error("Error parsing form in IndexUploadHandler", "error", err)
        http.Error(w, err.Error(), http.StatusBadRequest)
        return
    }
    fileBytes, err := io.ReadAll(fileData)
    if err != nil {
        slog.Error("Error reading file in IndexUploadHandler", "error", err)
        http.Error(w, err.Error(), http.StatusBadRequest)
        return
    }

    file, err := database.CreateFile(r.Context(), fileHeader.Filename, fileBytes)
    if err != nil {
        slog.Error("Error saving file in IndexUploadHandler", "error", err)
        http.Error(w, err.Error(), http.StatusBadRequest)
        return
    }

    component := UploadSuccessCard(file)
    err = component.Render(r.Context(), w)
    if err != nil {
        slog.Error("Error rendering in IndexUploadHandler", "error", err)
        http.Error(w, err.Error(), http.StatusBadRequest)
        return
    }
}
@@ -498,7 +498,7 @@ var internalSvgData = map[string]string{
    <path d="M8 12a2 2 0 0 0 2-2V8H8" />
    <path d="M14 12a2 2 0 0 0 2-2V8h-2" />`,
    "navigation": `<polygon points="3 11 22 2 13 21 11 13 3 11" />`,
    "minus": `<path d="M5 12h14" />`,
    "list-check": `<path d="M11 18H3" />
    <path d="m15 18 2 2 4-4" />
    <path d="M16 12H3" />
@@ -2471,7 +2471,7 @@ var internalSvgData = map[string]string{
    "toy-brick": `<rect width="18" height="12" x="3" y="8" rx="1" />
    <path d="M10 8V5c0-.6-.4-1-1-1H6a1 1 0 0 0-1 1v3" />
    <path d="M19 8V5c0-.6-.4-1-1-1h-3a1 1 0 0 0-1 1v3" />`,
    "wrench": `<path d="M14.7 6.3a1 1 0 0 0 0 1.4l1.6 1.6a1 1 0 0 0 1.4 0l3.77-3.77a6 6 0 0 1-7.94 7.94l-6.91 6.91a2.12 2.12 0 0 1-3-3l6.91-6.91a6 6 0 0 1 7.94-7.94l-3.76 3.76z" />`,
    "rectangle-horizontal": `<rect width="20" height="12" x="2" y="6" rx="2" />`,
    "map-pin-check-inside": `<path d="M20 10c0 4.993-5.539 10.193-7.399 11.799a1 1 0 0 1-1.202 0C9.539 20.193 4 14.993 4 10a8 8 0 0 1 16 0" />
    <path d="m9 10 2 2 4-4" />`,
@@ -4515,7 +4515,7 @@ var internalSvgData = map[string]string{
    "ticket-plus": `<path d="M2 9a3 3 0 0 1 0 6v2a2 2 0 0 0 2 2h16a2 2 0 0 0 2-2v-2a3 3 0 0 1 0-6V7a2 2 0 0 0-2-2H4a2 2 0 0 0-2 2Z" />
    <path d="M9 12h6" />
    <path d="M12 9v6" />`,
    "wifi-zero": `<path d="M12 20h.01" />`,
    "type-outline": `<path d="M14 16.5a.5.5 0 0 0 .5.5h.5a2 2 0 0 1 0 4H9a2 2 0 0 1 0-4h.5a.5.5 0 0 0 .5-.5v-9a.5.5 0 0 0-.5-.5h-3a.5.5 0 0 0-.5.5V8a2 2 0 0 1-4 0V5a2 2 0 0 1 2-2h16a2 2 0 0 1 2 2v3a2 2 0 0 1-4 0v-.5a.5.5 0 0 0-.5-.5h-3a.5.5 0 0 0-.5.5Z" />`,
    "circle-pause": `<circle cx="12" cy="12" r="10" />
    <line x1="10" x2="10" y1="15" y2="9" />
@@ -4657,7 +4657,7 @@ var internalSvgData = map[string]string{
    <path d="M4 18v3" />
    <path d="M8 14v7" />`,
    "chevron-down": `<path d="m6 9 6 6 6-6" />`,
    "chevron-up": `<path d="m18 15-6-6-6 6" />`,
    "dice-5": `<rect width="18" height="18" x="3" y="3" rx="2" ry="2" />
    <path d="M16 8h.01" />
    <path d="M8 8h.01" />
@@ -1,5 +1,6 @@
// templui component icon - version: main installed by templui v0.71.0
package icon

// This file is auto generated
// Using Lucide icons version 0.507.0
var ALargeSmall = Icon("a-large-small")