Feat: Synchronizer.

José Yeri 2025-10-15 10:43:58 -04:00
commit f81a1d5a81
39 changed files with 4242 additions and 0 deletions

.dockerignore (new file, 7 lines)

@@ -0,0 +1,7 @@
.git
*.md
*.zip
*.log
#*.env
tmp/
tests/

.env (new file, 40 lines)

@@ -0,0 +1,40 @@
# Database configuration
DB_HOST=localhost
DB_PORT=5434
DB_USER=admin
DB_PASSWORD=admin
DB_NAME=gotestdb
DB_MAX_OPEN_CONNS=25
DB_MAX_IDLE_CONNS=10
DB_CONN_MAX_LIFETIME=30m
#Redis
REDIS_HOST=10.0.0.112
REDIS_PORT=6379
REDIS_PASSWORD=TuPasswordSegura123
REDIS_DB=0
REDIS_TTL=10m
REDIS_SUBSCRIBE=cron:reload
#Log
LOG_FILE_PATH=logs/syncronizador.log
LOG_LEVEL=debug
LOG_MAX_SIZE=10
LOG_MAX_BACKUPS=7
LOG_MAX_AGE=30
LOG_COMPRESS=true
# REST client
TIMEOUT_SECONDS=30
RETRY_COUNT=3
TLS_SKIP_VERIFY=true
LOG_REQUESTS=false
ENABLE_DEBUG=false
WHERE_UNITS_BUSINESS=company_name = @company_name AND company_db = @company_db AND status = 'A'
ENVIRONMENT=development
# Elastic
#ELASTIC_URL=http://host.docker.internal:9200
ELASTIC_URL=http://10.0.0.124:9200
ELASTIC_ENABLED=true
ENCRYPTION_KEY=12345678901234567890123456789012

.gitignore (new vendored file, 167 lines)

@@ -0,0 +1,167 @@
# Created by https://www.toptal.com/developers/gitignore/api/go,goland
# Edit at https://www.toptal.com/developers/gitignore?templates=go,goland
### Go ###
# If you prefer the allow list template instead of the deny list, see community template:
# https://github.com/github/gitignore/blob/main/community/Golang/Go.AllowList.gitignore
#
# Binaries for programs and plugins
*.exe
*.exe~
*.dll
*.so
*.dylib
# Test binary, built with `go test -c`
*.test
# Output of the go coverage tool, specifically when used with LiteIDE
*.out
# Dependency directories (remove the comment below to include it)
# vendor/
# Go workspace file
go.work
### GoLand ###
# Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio, WebStorm and Rider
# Reference: https://intellij-support.jetbrains.com/hc/en-us/articles/206544839
# User-specific stuff
.idea/**/workspace.xml
.idea/**/tasks.xml
.idea/**/usage.statistics.xml
.idea/**/dictionaries
.idea/**/shelf
# AWS User-specific
.idea/**/aws.xml
# Generated files
.idea/**/contentModel.xml
# Sensitive or high-churn files
.idea/**/dataSources/
.idea/**/dataSources.ids
.idea/**/dataSources.local.xml
.idea/**/sqlDataSources.xml
.idea/**/dynamic.xml
.idea/**/uiDesigner.xml
.idea/**/dbnavigator.xml
# Gradle
.idea/**/gradle.xml
.idea/**/libraries
# Gradle and Maven with auto-import
# When using Gradle or Maven with auto-import, you should exclude module files,
# since they will be recreated, and may cause churn. Uncomment if using
# auto-import.
# .idea/artifacts
# .idea/compiler.xml
# .idea/jarRepositories.xml
# .idea/modules.xml
# .idea/*.iml
# .idea/modules
# *.iml
# *.ipr
# CMake
cmake-build-*/
# Mongo Explorer plugin
.idea/**/mongoSettings.xml
# File-based project format
*.iws
# IntelliJ
out/
# mpeltonen/sbt-idea plugin
.idea_modules/
# JIRA plugin
atlassian-ide-plugin.xml
# Cursive Clojure plugin
.idea/replstate.xml
# SonarLint plugin
.idea/sonarlint/
# Crashlytics plugin (for Android Studio and IntelliJ)
com_crashlytics_export_strings.xml
crashlytics.properties
crashlytics-build.properties
fabric.properties
# Editor-based Rest Client
.idea/httpRequests
# Android studio 3.1+ serialized cache file
.idea/caches/build_file_checksums.ser
### GoLand Patch ###
# Comment Reason: https://github.com/joeblau/gitignore.io/issues/186#issuecomment-215987721
# *.iml
# modules.xml
# .idea/misc.xml
# *.ipr
# Sonarlint plugin
# https://plugins.jetbrains.com/plugin/7973-sonarlint
.idea/**/sonarlint/
# SonarQube Plugin
# https://plugins.jetbrains.com/plugin/7238-sonarqube-community-plugin
.idea/**/sonarIssues.xml
# Markdown Navigator plugin
# https://plugins.jetbrains.com/plugin/7896-markdown-navigator-enhanced
.idea/**/markdown-navigator.xml
.idea/**/markdown-navigator-enh.xml
.idea/**/markdown-navigator/
# Cache file creation bug
# See https://youtrack.jetbrains.com/issue/JBR-2257
.idea/$CACHE_FILE$
# CodeStream plugin
# https://plugins.jetbrains.com/plugin/12206-codestream
.idea/codestream.xml
# Azure Toolkit for IntelliJ plugin
# https://plugins.jetbrains.com/plugin/8053-azure-toolkit-for-intellij
.idea/**/azureSettings.xml
# End of https://www.toptal.com/developers/gitignore/api/go,goland
# Created by https://www.toptal.com/developers/gitignore/api/visualstudiocode
# Edit at https://www.toptal.com/developers/gitignore?templates=visualstudiocode
### VisualStudioCode ###
.vscode/*
!.vscode/settings.json
!.vscode/tasks.json
!.vscode/launch.json
!.vscode/extensions.json
!.vscode/*.code-snippets
# Local History for Visual Studio Code
.history/
# Built Visual Studio Code Extensions
*.vsix
### VisualStudioCode Patch ###
# Ignore all local history of files
.history
.ionide
# End of https://www.toptal.com/developers/gitignore/api/visualstudiocode
logs/rest_myapps_orbit.log

Dockerfile (new file, 30 lines)

@@ -0,0 +1,30 @@
# Stage 1: build the binary
FROM golang:1.24-alpine AS builder
WORKDIR /app
# Copy module files first (layer cache)
COPY go.mod go.sum ./
RUN go mod download
# Copy everything and build
COPY . .
RUN go build -o go-sync-service ./cmd/go-sync-service
# Stage 2: minimal final image
FROM alpine:latest
WORKDIR /root/
COPY --from=builder /app/go-sync-service .
COPY .env .env
ENV TZ=America/La_Paz
RUN ln -snf /usr/share/zoneinfo/$TZ /etc/localtime && echo $TZ > /etc/timezone
# Port, if you use it
EXPOSE 9100
# Run the binary
CMD ["./go-sync-service"]

Dockerfile copy (new file, 32 lines)

@@ -0,0 +1,32 @@
# ---------------------------------------------------
# 1) Builder: builds the API and worker, and prepares goose
# ---------------------------------------------------
FROM golang:1.24 AS builder
RUN apt-get update && apt-get install -y tzdata
ENV TZ=America/La_Paz
RUN ln -snf /usr/share/zoneinfo/$TZ /etc/localtime && echo $TZ > /etc/timezone
# Working directory inside the container
WORKDIR /app
# 1.1 Dependencies (cache)
COPY go.mod go.sum ./
RUN go mod tidy
# 1.2 Source code
COPY . .
# Install OpenSSL (needed to generate the certificates)
#RUN apt-get update && apt-get install -y openssl && rm -rf /var/lib/apt/lists/*
# Copy the entrypoint script into /docker-entrypoint-init.d/ and make it executable
#COPY scripts/entrypoint.sh /docker-entrypoint-init.d/entrypoint.sh
#RUN chmod +x /docker-entrypoint-init.d/entrypoint.sh
# Configure the entrypoint to run the script
#ENTRYPOINT ["/bin/sh", "/docker-entrypoint-init.d/entrypoint.sh"]
# Command executed after the entrypoint (e.g., start the API)
CMD ["go", "run", "cmd/go-sync-service/main.go"]

README.md (new file, 198 lines)

@@ -0,0 +1,198 @@
# 📦 go-sync-service
**go-sync-service** is a Go microservice that runs cron-based synchronization tasks, using Redis for control and dynamic configuration and PostgreSQL (through GORM) as its database. It is designed with a modular, decoupled architecture so it is easy to reuse and scale.
---
## 🚀 Features
- 🔁 Periodic task execution with `robfig/cron`
- 📦 Task configuration loaded from Redis (dynamic configuration)
- 🧩 Clean architecture with layered separation (`ports`, `dto`, `config`, `scheduler`)
- 🛠️ Decoupled synchronization service that is easy to extend
- 🗂️ Clear code organization with reusable submodules
- 🗃️ Persistence in PostgreSQL via `gorm.io/gorm`
- 🧠 Centralized logger
---
## 🛠️ Requirements
- Go 1.24+ (as declared in `go.mod`)
- Redis
- PostgreSQL
- Docker (optional, for deployment)
---
## 📁 Project structure
```
go-sync-service/
├── cmd/
│   └── go-sync-service/   # Entry point: main.go
├── internal/
│   ├── config/            # DB, Redis, and logger configuration
│   ├── scheduler/         # Cron job orchestrator
│   ├── sync/              # Synchronization logic (fetchers)
│   ├── db/                # GORM persistence (batch upserts/updates)
│   ├── http/              # REST/GraphQL client and SAP session handling
│   ├── security/          # AES-GCM credential decryption
│   ├── domain/
│   │   ├── dto/           # Data structures
│   │   ├── model/         # GORM models
│   │   └── ports/         # Abstraction interfaces
├── elastic/               # Logrus hook for Elasticsearch
├── metrics/               # Prometheus metrics
├── README.md
└── go.mod / go.sum
```
---
## ▶️ How to run
### 1. Clone the project
```bash
git clone https://github.com/tuusuario/go-sync-service.git
cd go-sync-service
```
### 2. Configure environment variables
Create a `.env` file or set the required values (names as used in `internal/config/config.go`):
```env
DB_HOST=localhost
DB_PORT=5432
DB_NAME=sync_db
DB_USER=postgres
DB_PASSWORD=secret
REDIS_HOST=localhost
REDIS_PORT=6379
REDIS_PASSWORD=
```
### 3. Run the service
```bash
go run cmd/go-sync-service/main.go
```
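You can also build and run it with the included `Dockerfile`. A minimal sketch (the image tag and the use of `--env-file` are illustrative; the image copies `.env` and exposes the metrics port 9100):
```bash
docker build -t go-sync-service .
docker run --rm --env-file .env -p 9100:9100 go-sync-service
```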
---
## 🧪 Tests
You can run the tests (once you implement them) with:
```bash
go test ./...
```
---
## 📌 Technical notes
- Cron configurations (`CronConfigList`) are loaded from Redis and deserialized from JSON (see the illustrative example below).
- `internal/scheduler/manager.go` coordinates loading and dynamically restarting jobs whenever the configuration in Redis changes.
- The `RedisConfigProvider` interface decouples the configuration data source, making it easy to swap out.
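A minimal sketch of the JSON expected under the `parametros:cron_config` Redis key (field names come from the `CronConfigList`/`CronJob` DTOs; the concrete values are placeholders):
```json
{
  "crons": [
    {
      "nombre": "sync-business-partners",
      "unidad_negocio": {
        "company_name": "ACME",
        "company_db": "ACME_PROD"
      },
      "configuracion": {
        "ejecucion": "@every 2m",
        "proceso": ["parametros:rest_login", "parametros:rest_config"],
        "enabled": true
      }
    }
  ]
}
```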
---
## 🧑‍💻 Author
Developed by [tuusuario]

@@ -0,0 +1,68 @@
package main
import (
"context"
"fmt"
"net/http"
"os"
"os/signal"
"syscall"
"github.com/prometheus/client_golang/prometheus/promhttp"
"github.com/tuusuario/go-sync-service/internal/config"
"github.com/tuusuario/go-sync-service/internal/scheduler"
"github.com/tuusuario/go-sync-service/metrics"
)
func main() {
// Load the configuration
conf, err := config.LoadConfig()
if err != nil {
fmt.Println("❌ Error cargando configuración:", err)
os.Exit(1)
}
// Initialize the logger with the loaded configuration
config.InitLogger(conf)
config.Log.Info("🚀 Iniciando servicio: go-sync-service")
// Connect to Redis
redisClient := config.GetRedisClient(conf)
if err := redisClient.Ping(context.Background()).Err(); err != nil {
config.Log.Fatalf("❌ Redis no disponible: %v", err)
}
config.Log.Info("✅ Redis conectado")
// Create the configuration provider backed by Redis
redisManager := config.NewRedisManager(redisClient)
redisConfigProvider := config.NewRedisProvider(redisManager, context.Background())
// Connect to the database
database := config.GetDatabaseConnection(conf)
if database == nil {
config.Log.Fatal("❌ No se pudo establecer la conexión con la base de datos.")
}
config.Log.Info("✅ Conexión a base de datos establecida")
scheduler.Start(context.Background(), redisClient, redisConfigProvider, database)
config.Log.Info("✅ Scheduler en ejecución y escuchando recargas")
// Metrics (Prometheus/Grafana)
metrics.Register()
startMetricsServer()
// Wait for an OS signal to shut down gracefully
stop := make(chan os.Signal, 1)
signal.Notify(stop, os.Interrupt, syscall.SIGTERM)
<-stop
config.Log.Info("🛑 Señal de apagado recibida, cerrando servicio...")
}
func startMetricsServer() {
go func() {
http.Handle("/metrics", promhttp.Handler())
config.Log.Info("📊 Servidor de métricas en :9100/metrics")
http.ListenAndServe(":9100", nil)
}()
}

dump.rdb (new binary file, not shown)

elastic/elastic_hook.go (new file, 57 lines)

@@ -0,0 +1,57 @@
package elastic
import (
"bytes"
"context"
"encoding/json"
"time"
"github.com/elastic/go-elasticsearch"
"github.com/sirupsen/logrus"
)
type ElasticHook struct {
Client *elasticsearch.Client
Index string
}
func NewElasticHook(client *elasticsearch.Client, index string) *ElasticHook {
return &ElasticHook{
Client: client,
Index: index,
}
}
func (hook *ElasticHook) Fire(entry *logrus.Entry) error {
doc := map[string]interface{}{
"@timestamp": entry.Time.Format(time.RFC3339),
"timestamp": entry.Time.Format(time.RFC3339),
"level": entry.Level.String(),
"message": entry.Message,
"module": func() interface{} {
if val, ok := entry.Data["module"]; ok {
return val
}
return "default"
}(),
"file": entry.Caller.File,
"line": entry.Caller.Line,
}
var buf bytes.Buffer
if err := json.NewEncoder(&buf).Encode(doc); err != nil {
return err
}
_, err := hook.Client.Index(
hook.Index,
&buf,
hook.Client.Index.WithContext(context.Background()),
hook.Client.Index.WithRefresh("true"),
)
return err
}
func (hook *ElasticHook) Levels() []logrus.Level {
return logrus.AllLevels
}

go.mod (new file, 60 lines)

@@ -0,0 +1,60 @@
module github.com/tuusuario/go-sync-service
go 1.24.4
require (
github.com/go-resty/resty/v2 v2.16.5
github.com/jackc/pgx/v5 v5.7.5 // indirect
github.com/redis/go-redis/v9 v9.10.0
github.com/robfig/cron/v3 v3.0.1
)
require (
github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc // indirect
github.com/fsnotify/fsnotify v1.8.0 // indirect
github.com/go-viper/mapstructure/v2 v2.2.1 // indirect
github.com/jackc/puddle/v2 v2.2.2 // indirect
github.com/jinzhu/inflection v1.0.0 // indirect
github.com/jinzhu/now v1.1.5 // indirect
github.com/pelletier/go-toml/v2 v2.2.3 // indirect
github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 // indirect
github.com/sagikazarmark/locafero v0.7.0 // indirect
github.com/sourcegraph/conc v0.3.0 // indirect
github.com/spf13/afero v1.12.0 // indirect
github.com/spf13/cast v1.7.1 // indirect
github.com/spf13/pflag v1.0.6 // indirect
github.com/subosito/gotenv v1.6.0 // indirect
go.uber.org/atomic v1.9.0 // indirect
go.uber.org/multierr v1.9.0 // indirect
golang.org/x/sync v0.13.0 // indirect
golang.org/x/sys v0.32.0 // indirect
gopkg.in/yaml.v3 v3.0.1 // indirect
gorm.io/gorm v1.30.0
)
require (
github.com/cespare/xxhash/v2 v2.3.0 // indirect
github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f // indirect
github.com/jackc/pgpassfile v1.0.0 // indirect
github.com/jackc/pgservicefile v0.0.0-20240606120523-5a60cdf6a761 // indirect
github.com/prometheus/client_golang v1.22.0
github.com/sirupsen/logrus v1.9.3
github.com/spf13/viper v1.20.1
golang.org/x/crypto v0.37.0 // indirect
golang.org/x/net v0.33.0 // indirect
golang.org/x/text v0.24.0 // indirect
gopkg.in/natefinch/lumberjack.v2 v2.2.1
gorm.io/driver/postgres v1.6.0
)
require github.com/elastic/go-elasticsearch v0.0.0
require (
github.com/beorn7/perks v1.0.1 // indirect
github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822 // indirect
github.com/prometheus/client_model v0.6.1 // indirect
github.com/prometheus/common v0.62.0 // indirect
github.com/prometheus/procfs v0.15.1 // indirect
google.golang.org/protobuf v1.36.5 // indirect
)

go.sum (new file, 119 lines)

@@ -0,0 +1,119 @@
github.com/beorn7/perks v1.0.1 h1:VlbKKnNfV8bJzeqoa4cOKqO6bYr3WgKZxO8Z16+hsOM=
github.com/beorn7/perks v1.0.1/go.mod h1:G2ZrVWU2WbWT9wwq4/hrbKbnv/1ERSJQ0ibhJ6rlkpw=
github.com/bsm/ginkgo/v2 v2.12.0 h1:Ny8MWAHyOepLGlLKYmXG4IEkioBysk6GpaRTLC8zwWs=
github.com/bsm/ginkgo/v2 v2.12.0/go.mod h1:SwYbGRRDovPVboqFv0tPTcG1sN61LM1Z4ARdbAV9g4c=
github.com/bsm/gomega v1.27.10 h1:yeMWxP2pV2fG3FgAODIY8EiRE3dy0aeFYt4l7wh6yKA=
github.com/bsm/gomega v1.27.10/go.mod h1:JyEr/xRbxbtgWNi8tIEVPUYZ5Dzef52k01W3YH0H+O0=
github.com/cespare/xxhash/v2 v2.3.0 h1:UL815xU9SqsFlibzuggzjXhog7bL6oX9BbNZnL2UFvs=
github.com/cespare/xxhash/v2 v2.3.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs=
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc h1:U9qPSI2PIWSS1VwoXQT9A3Wy9MM3WgvqSxFWenqJduM=
github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f h1:lO4WD4F/rVNCu3HqELle0jiPLLBs70cWOduZpkS1E78=
github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f/go.mod h1:cuUVRXasLTGF7a8hSLbxyZXjz+1KgoB3wDUb6vlszIc=
github.com/elastic/go-elasticsearch v0.0.0 h1:Pd5fqOuBxKxv83b0+xOAJDAkziWYwFinWnBO0y+TZaA=
github.com/elastic/go-elasticsearch v0.0.0/go.mod h1:TkBSJBuTyFdBnrNqoPc54FN0vKf5c04IdM4zuStJ7xg=
github.com/frankban/quicktest v1.14.6 h1:7Xjx+VpznH+oBnejlPUj8oUpdxnVs4f8XU8WnHkI4W8=
github.com/frankban/quicktest v1.14.6/go.mod h1:4ptaffx2x8+WTWXmUCuVU6aPUX1/Mz7zb5vbUoiM6w0=
github.com/fsnotify/fsnotify v1.8.0 h1:dAwr6QBTBZIkG8roQaJjGof0pp0EeF+tNV7YBP3F/8M=
github.com/fsnotify/fsnotify v1.8.0/go.mod h1:8jBTzvmWwFyi3Pb8djgCCO5IBqzKJ/Jwo8TRcHyHii0=
github.com/go-resty/resty/v2 v2.16.5 h1:hBKqmWrr7uRc3euHVqmh1HTHcKn99Smr7o5spptdhTM=
github.com/go-resty/resty/v2 v2.16.5/go.mod h1:hkJtXbA2iKHzJheXYvQ8snQES5ZLGKMwQ07xAwp/fiA=
github.com/go-viper/mapstructure/v2 v2.2.1 h1:ZAaOCxANMuZx5RCeg0mBdEZk7DZasvvZIxtHqx8aGss=
github.com/go-viper/mapstructure/v2 v2.2.1/go.mod h1:oJDH3BJKyqBA2TXFhDsKDGDTlndYOZ6rGS0BRZIxGhM=
github.com/google/go-cmp v0.7.0 h1:wk8382ETsv4JYUZwIsn6YpYiWiBsYLSJiTsyBybVuN8=
github.com/google/go-cmp v0.7.0/go.mod h1:pXiqmnSA92OHEEa9HXL2W4E7lf9JzCmGVUdgjX3N/iU=
github.com/jackc/pgpassfile v1.0.0 h1:/6Hmqy13Ss2zCq62VdNG8tM1wchn8zjSGOBJ6icpsIM=
github.com/jackc/pgpassfile v1.0.0/go.mod h1:CEx0iS5ambNFdcRtxPj5JhEz+xB6uRky5eyVu/W2HEg=
github.com/jackc/pgservicefile v0.0.0-20240606120523-5a60cdf6a761 h1:iCEnooe7UlwOQYpKFhBabPMi4aNAfoODPEFNiAnClxo=
github.com/jackc/pgservicefile v0.0.0-20240606120523-5a60cdf6a761/go.mod h1:5TJZWKEWniPve33vlWYSoGYefn3gLQRzjfDlhSJ9ZKM=
github.com/jackc/pgx/v5 v5.7.5 h1:JHGfMnQY+IEtGM63d+NGMjoRpysB2JBwDr5fsngwmJs=
github.com/jackc/pgx/v5 v5.7.5/go.mod h1:aruU7o91Tc2q2cFp5h4uP3f6ztExVpyVv88Xl/8Vl8M=
github.com/jackc/puddle/v2 v2.2.2 h1:PR8nw+E/1w0GLuRFSmiioY6UooMp6KJv0/61nB7icHo=
github.com/jackc/puddle/v2 v2.2.2/go.mod h1:vriiEXHvEE654aYKXXjOvZM39qJ0q+azkZFrfEOc3H4=
github.com/jinzhu/inflection v1.0.0 h1:K317FqzuhWc8YvSVlFMCCUb36O/S9MCKRDI7QkRKD/E=
github.com/jinzhu/inflection v1.0.0/go.mod h1:h+uFLlag+Qp1Va5pdKtLDYj+kHp5pxUVkryuEj+Srlc=
github.com/jinzhu/now v1.1.5 h1:/o9tlHleP7gOFmsnYNz3RGnqzefHA47wQpKrrdTIwXQ=
github.com/jinzhu/now v1.1.5/go.mod h1:d3SSVoowX0Lcu0IBviAWJpolVfI5UJVZZ7cO71lE/z8=
github.com/klauspost/compress v1.18.0 h1:c/Cqfb0r+Yi+JtIEq73FWXVkRonBlf0CRNYc8Zttxdo=
github.com/klauspost/compress v1.18.0/go.mod h1:2Pp+KzxcywXVXMr50+X0Q/Lsb43OQHYWRCY2AiWywWQ=
github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE=
github.com/kr/pretty v0.3.1/go.mod h1:hoEshYVHaxMs3cyo3Yncou5ZscifuDolrwPKZanG3xk=
github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY=
github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE=
github.com/kylelemons/godebug v1.1.0 h1:RPNrshWIDI6G2gRW9EHilWtl7Z6Sb1BR0xunSBf0SNc=
github.com/kylelemons/godebug v1.1.0/go.mod h1:9/0rRGxNHcop5bhtWyNeEfOS8JIWk580+fNqagV/RAw=
github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822 h1:C3w9PqII01/Oq1c1nUAm88MOHcQC9l5mIlSMApZMrHA=
github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822/go.mod h1:+n7T8mK8HuQTcFwEeznm/DIxMOiR9yIdICNftLE1DvQ=
github.com/pelletier/go-toml/v2 v2.2.3 h1:YmeHyLY8mFWbdkNWwpr+qIL2bEqT0o95WSdkNHvL12M=
github.com/pelletier/go-toml/v2 v2.2.3/go.mod h1:MfCQTFTvCcUyyvvwm1+G6H/jORL20Xlb6rzQu9GuUkc=
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 h1:Jamvg5psRIccs7FGNTlIRMkT8wgtp5eCXdBlqhYGL6U=
github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/prometheus/client_golang v1.22.0 h1:rb93p9lokFEsctTys46VnV1kLCDpVZ0a/Y92Vm0Zc6Q=
github.com/prometheus/client_golang v1.22.0/go.mod h1:R7ljNsLXhuQXYZYtw6GAE9AZg8Y7vEW5scdCXrWRXC0=
github.com/prometheus/client_model v0.6.1 h1:ZKSh/rekM+n3CeS952MLRAdFwIKqeY8b62p8ais2e9E=
github.com/prometheus/client_model v0.6.1/go.mod h1:OrxVMOVHjw3lKMa8+x6HeMGkHMQyHDk9E3jmP2AmGiY=
github.com/prometheus/common v0.62.0 h1:xasJaQlnWAeyHdUBeGjXmutelfJHWMRr+Fg4QszZ2Io=
github.com/prometheus/common v0.62.0/go.mod h1:vyBcEuLSvWos9B1+CyL7JZ2up+uFzXhkqml0W5zIY1I=
github.com/prometheus/procfs v0.15.1 h1:YagwOFzUgYfKKHX6Dr+sHT7km/hxC76UB0learggepc=
github.com/prometheus/procfs v0.15.1/go.mod h1:fB45yRUv8NstnjriLhBQLuOUt+WW4BsoGhij/e3PBqk=
github.com/redis/go-redis/v9 v9.10.0 h1:FxwK3eV8p/CQa0Ch276C7u2d0eNC9kCmAYQ7mCXCzVs=
github.com/redis/go-redis/v9 v9.10.0/go.mod h1:huWgSWd8mW6+m0VPhJjSSQ+d6Nh1VICQ6Q5lHuCH/Iw=
github.com/robfig/cron/v3 v3.0.1 h1:WdRxkvbJztn8LMz/QEvLN5sBU+xKpSqwwUO1Pjr4qDs=
github.com/robfig/cron/v3 v3.0.1/go.mod h1:eQICP3HwyT7UooqI/z+Ov+PtYAWygg1TEWWzGIFLtro=
github.com/rogpeppe/go-internal v1.10.0 h1:TMyTOH3F/DB16zRVcYyreMH6GnZZrwQVAoYjRBZyWFQ=
github.com/rogpeppe/go-internal v1.10.0/go.mod h1:UQnix2H7Ngw/k4C5ijL5+65zddjncjaFoBhdsK/akog=
github.com/sagikazarmark/locafero v0.7.0 h1:5MqpDsTGNDhY8sGp0Aowyf0qKsPrhewaLSsFaodPcyo=
github.com/sagikazarmark/locafero v0.7.0/go.mod h1:2za3Cg5rMaTMoG/2Ulr9AwtFaIppKXTRYnozin4aB5k=
github.com/sirupsen/logrus v1.9.3 h1:dueUQJ1C2q9oE3F7wvmSGAaVtTmUizReu6fjN8uqzbQ=
github.com/sirupsen/logrus v1.9.3/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ=
github.com/sourcegraph/conc v0.3.0 h1:OQTbbt6P72L20UqAkXXuLOj79LfEanQ+YQFNpLA9ySo=
github.com/sourcegraph/conc v0.3.0/go.mod h1:Sdozi7LEKbFPqYX2/J+iBAM6HpqSLTASQIKqDmF7Mt0=
github.com/spf13/afero v1.12.0 h1:UcOPyRBYczmFn6yvphxkn9ZEOY65cpwGKb5mL36mrqs=
github.com/spf13/afero v1.12.0/go.mod h1:ZTlWwG4/ahT8W7T0WQ5uYmjI9duaLQGy3Q2OAl4sk/4=
github.com/spf13/cast v1.7.1 h1:cuNEagBQEHWN1FnbGEjCXL2szYEXqfJPbP2HNUaca9Y=
github.com/spf13/cast v1.7.1/go.mod h1:ancEpBxwJDODSW/UG4rDrAqiKolqNNh2DX3mk86cAdo=
github.com/spf13/pflag v1.0.6 h1:jFzHGLGAlb3ruxLB8MhbI6A8+AQX/2eW4qeyNZXNp2o=
github.com/spf13/pflag v1.0.6/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg=
github.com/spf13/viper v1.20.1 h1:ZMi+z/lvLyPSCoNtFCpqjy0S4kPbirhpTMwl8BkW9X4=
github.com/spf13/viper v1.20.1/go.mod h1:P9Mdzt1zoHIG8m2eZQinpiBjo6kCmZSKBClNNqjJvu4=
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
github.com/stretchr/testify v1.10.0 h1:Xv5erBjTwe/5IxqUQTdXv5kgmIvbHo3QQyRwhJsOfJA=
github.com/stretchr/testify v1.10.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY=
github.com/subosito/gotenv v1.6.0 h1:9NlTDc1FTs4qu0DDq7AEtTPNw6SVm7uBMsUCUjABIf8=
github.com/subosito/gotenv v1.6.0/go.mod h1:Dk4QP5c2W3ibzajGcXpNraDfq2IrhjMIvMSWPKKo0FU=
go.uber.org/atomic v1.9.0 h1:ECmE8Bn/WFTYwEW/bpKD3M8VtR/zQVbavAoalC1PYyE=
go.uber.org/atomic v1.9.0/go.mod h1:fEN4uk6kAWBTFdckzkM89CLk9XfWZrxpCo0nPH17wJc=
go.uber.org/multierr v1.9.0 h1:7fIwc/ZtS0q++VgcfqFDxSBZVv/Xo49/SYnDFupUwlI=
go.uber.org/multierr v1.9.0/go.mod h1:X2jQV1h+kxSjClGpnseKVIxpmcjrj7MNnI0bnlfKTVQ=
golang.org/x/crypto v0.37.0 h1:kJNSjF/Xp7kU0iB2Z+9viTPMW4EqqsrywMXLJOOsXSE=
golang.org/x/crypto v0.37.0/go.mod h1:vg+k43peMZ0pUMhYmVAWysMK35e6ioLh3wB8ZCAfbVc=
golang.org/x/net v0.33.0 h1:74SYHlV8BIgHIFC/LrYkOGIwL19eTYXQ5wc6TBuO36I=
golang.org/x/net v0.33.0/go.mod h1:HXLR5J+9DxmrqMwG9qjGCxZ+zKXxBru04zlTvWlWuN4=
golang.org/x/sync v0.13.0 h1:AauUjRAJ9OSnvULf/ARrrVywoJDy0YS2AwQ98I37610=
golang.org/x/sync v0.13.0/go.mod h1:1dzgHSNfp02xaA81J2MS99Qcpr2w7fw1gpm99rleRqA=
golang.org/x/sys v0.0.0-20220715151400-c0bba94af5f8/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.32.0 h1:s77OFDvIQeibCmezSnk/q6iAfkdiQaJi4VzroCFrN20=
golang.org/x/sys v0.32.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k=
golang.org/x/text v0.24.0 h1:dd5Bzh4yt5KYA8f9CJHCP4FB4D51c2c6JvN37xJJkJ0=
golang.org/x/text v0.24.0/go.mod h1:L8rBsPeo2pSS+xqN0d5u2ikmjtmoJbDBT1b7nHvFCdU=
golang.org/x/time v0.8.0 h1:9i3RxcPv3PZnitoVGMPDKZSq1xW1gK1Xy3ArNOGZfEg=
golang.org/x/time v0.8.0/go.mod h1:3BpzKBy/shNhVucY/MWOyx10tF3SFh9QdLuxbVysPQM=
google.golang.org/protobuf v1.36.5 h1:tPhr+woSbjfYvY6/GPufUoYizxw1cF/yFoxJ2fmpwlM=
google.golang.org/protobuf v1.36.5/go.mod h1:9fA7Ob0pmnwhb644+1+CVWFRbNajQ6iRojtC/QF5bRE=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk=
gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q=
gopkg.in/natefinch/lumberjack.v2 v2.2.1 h1:bBRl1b0OH9s/DuPhuXpNl+VtCaJXFZ5/uEFST95x9zc=
gopkg.in/natefinch/lumberjack.v2 v2.2.1/go.mod h1:YD8tP3GAjkrDg1eZH7EGmyESg/lsYskCTPBJVb9jqSc=
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
gorm.io/driver/postgres v1.6.0 h1:2dxzU8xJ+ivvqTRph34QX+WrRaJlmfyPqXmoGVjMBa4=
gorm.io/driver/postgres v1.6.0/go.mod h1:vUw0mrGgrTK+uPHEhAdV4sfFELrByKVGnaVRkXDhtWo=
gorm.io/gorm v1.30.0 h1:qbT5aPv1UH8gI99OsRlvDToLxW5zR7FzS9acZDOZcgs=
gorm.io/gorm v1.30.0/go.mod h1:8Z33v652h4//uMA76KjeDH8mJXPm1QNCYrMeatR0DOE=

internal/config/config.go (new file, 75 lines)

@@ -0,0 +1,75 @@
package config
import (
"fmt"
"os"
"time"
"github.com/spf13/viper"
)
// Config holds the global configuration values
type Config struct {
DBHost string `mapstructure:"DB_HOST"`
DBPort int `mapstructure:"DB_PORT"`
DBUser string `mapstructure:"DB_USER"`
DBPassword string `mapstructure:"DB_PASSWORD"`
DBName string `mapstructure:"DB_NAME"`
DBMaxOpenConns int `mapstructure:"DB_MAX_OPEN_CONNS"`
DBMaxIdleConns int `mapstructure:"DB_MAX_IDLE_CONNS"`
DBConnMaxLifetime time.Duration `mapstructure:"DB_CONN_MAX_LIFETIME"`
// Redis
RedisHost string `mapstructure:"REDIS_HOST"`
RedisPort int `mapstructure:"REDIS_PORT"`
RedisPassword string `mapstructure:"REDIS_PASSWORD"`
RedisDB int `mapstructure:"REDIS_DB"`
RedisTTL time.Duration `mapstructure:"REDIS_TTL"` // default time-to-live for keys
RedisSubscribe string `mapstructure:"REDIS_SUBSCRIBE"`
LogFilePath string `mapstructure:"LOG_FILE_PATH"`
LogLevel string `mapstructure:"LOG_LEVEL"`
LogMaxSize int `mapstructure:"LOG_MAX_SIZE"`
LogMaxBackups int `mapstructure:"LOG_MAX_BACKUPS"`
LogMaxAge int `mapstructure:"LOG_MAX_AGE"`
LogCompress bool `mapstructure:"LOG_COMPRESS"`
Environment string `mapstructure:"ENVIRONMENT"`
WhereUnitsBusiness string `mapstructure:"WHERE_UNITS_BUSINESS"`
ElasticURL string `mapstructure:"ELASTIC_URL"`
ElasticEnabled bool `mapstructure:"ELASTIC_ENABLED"`
TimeoutSeconds int `mapstructure:"TIMEOUT_SECONDS"`
RetryCount int `mapstructure:"RETRY_COUNT"`
TLSSkipVerify bool `mapstructure:"TLS_SKIP_VERIFY"`
LogRequests bool `mapstructure:"LOG_REQUESTS"`
EnableDebug bool `mapstructure:"ENABLE_DEBUG"`
}
var GlobalConfig *Config
// LoadConfig loads the environment variables using viper
func LoadConfig() (*Config, error) {
viper.SetConfigName(".env")
viper.SetConfigType("env")
viper.AddConfigPath(".")
viper.AutomaticEnv()
if err := viper.ReadInConfig(); err != nil {
fmt.Println("⚠️ No se pudo leer el archivo .env, usando variables de entorno del sistema")
}
var config Config
if err := viper.Unmarshal(&config); err != nil {
return nil, err
}
GlobalConfig = &config
// --- 👇 key step: export ENCRYPTION_KEY into the process environment ---
if k := viper.GetString("ENCRYPTION_KEY"); k != "" {
_ = os.Setenv("ENCRYPTION_KEY", k)
}
fmt.Println("✅ Configuración .env cargada correctamente.")
return &config, nil
}

@@ -0,0 +1,63 @@
package config
import (
"fmt"
"log"
"sync"
"time"
"gorm.io/driver/postgres"
"gorm.io/gorm"
"gorm.io/gorm/logger"
)
var (
db *gorm.DB
once sync.Once
dbErr error
)
// GetDatabaseConnection returns the singleton database connection
func GetDatabaseConnection(cfg *Config) *gorm.DB {
once.Do(func() {
dsn := fmt.Sprintf(
"host=%s port=%d user=%s password=%s dbname=%s sslmode=disable",
cfg.DBHost, cfg.DBPort, cfg.DBUser, cfg.DBPassword, cfg.DBName,
)
// Automatic retries
maxRetries := 3
for i := 0; i < maxRetries; i++ {
db, dbErr = gorm.Open(postgres.Open(dsn), &gorm.Config{
Logger: logger.Default.LogMode(logger.Silent),
})
if dbErr == nil {
break
}
log.Printf("⚠️ Intento %d de conexión fallido: %v", i+1, dbErr)
time.Sleep(2 * time.Second)
}
if dbErr != nil {
log.Fatalf("❌ No se pudo conectar a la BD después de varios intentos: %v", dbErr)
}
// Connection pool configuration
sqlDB, err := db.DB()
if err != nil {
log.Fatalf("❌ Error al obtener objeto SQL: %v", err)
}
// Immediate validation
if err := sqlDB.Ping(); err != nil {
log.Fatalf("❌ Error al hacer ping a la BD: %v", err)
}
sqlDB.SetMaxOpenConns(cfg.DBMaxOpenConns) // maximum open connections
sqlDB.SetMaxIdleConns(cfg.DBMaxIdleConns) // allowed idle connections
sqlDB.SetConnMaxLifetime(cfg.DBConnMaxLifetime) // maximum connection lifetime (DBConnMaxLifetime is already a time.Duration, e.g. 30m)
log.Println("✅ Conexión a la BD establecida correctamente")
})
return db
}

internal/config/keys.go (new file, 7 lines)

@@ -0,0 +1,7 @@
package config
const (
RestConfig = "parametros:rest_config"
RestLogin = "parametros:rest_login"
CronConfig = "parametros:cron_config"
)

internal/config/logger.go (new file, 70 lines)

@@ -0,0 +1,70 @@
package config
import (
"fmt"
"io"
"os"
"path/filepath"
"runtime"
"strings"
"time"
"github.com/elastic/go-elasticsearch"
"github.com/sirupsen/logrus"
"github.com/tuusuario/go-sync-service/elastic"
"gopkg.in/natefinch/lumberjack.v2"
)
// Log is the global logger instance
var Log = logrus.New()
func InitLogger(cfg *Config) {
// Configure log rotation with Lumberjack
rotator := &lumberjack.Logger{
Filename: cfg.LogFilePath, // log file
MaxSize: cfg.LogMaxSize, // maximum size in MB before rotating
MaxBackups: cfg.LogMaxBackups, // maximum number of backup files
MaxAge: cfg.LogMaxAge, // maximum number of days to keep logs
Compress: cfg.LogCompress, // compress old logs
}
// Configure Logrus to write to stdout and the rotated file
Log.SetOutput(io.MultiWriter(os.Stdout, rotator))
Log.SetReportCaller(true) // 👈 adds file and line to every entry
// JSON format with timestamp
/*Log.SetFormatter(&logrus.JSONFormatter{
TimestampFormat: time.RFC3339, // ISO 8601 format
})*/
Log.SetFormatter(&logrus.JSONFormatter{
TimestampFormat: time.RFC3339,
CallerPrettyfier: func(f *runtime.Frame) (function string, file string) {
// Extract only the file name and line number
fnParts := strings.Split(f.Function, "/")
return fnParts[len(fnParts)-1], fmt.Sprintf("%s:%d", filepath.Base(f.File), f.Line)
},
})
// Configure the logging level (DEBUG, INFO, ERROR, etc.)
level, err := logrus.ParseLevel(cfg.LogLevel)
if err != nil {
level = logrus.InfoLevel
}
Log.SetLevel(level)
// Elasticsearch hook
if cfg.ElasticEnabled {
Log.Debug("✅ Elasticsearch enabled")
es, err := elasticsearch.NewClient(elasticsearch.Config{
Addresses: []string{cfg.ElasticURL},
})
if err == nil {
hook := elastic.NewElasticHook(es, "go-sync-service")
Log.AddHook(hook)
} else {
Log.Error("No se pudo conectar a Elasticsearch: ", err)
}
}
}

internal/config/redis.go (new file, 30 lines)

@@ -0,0 +1,30 @@
package config
import (
"context"
"fmt"
"log"
"time"
"github.com/redis/go-redis/v9"
)
func GetRedisClient(cfg *Config) *redis.Client {
addr := fmt.Sprintf("%s:%d", cfg.RedisHost, cfg.RedisPort)
client := redis.NewClient(&redis.Options{
Addr: addr,
Password: cfg.RedisPassword,
DB: cfg.RedisDB,
})
ctx, cancel := context.WithTimeout(context.Background(), 3*time.Second)
defer cancel()
_, err := client.Ping(ctx).Result()
if err != nil {
log.Fatalf("❌ Redis connection failed: %v", err)
}
return client
}

@@ -0,0 +1,40 @@
package config
import (
"context"
"fmt"
"log"
"time"
"github.com/redis/go-redis/v9"
)
type RedisManager struct {
redis *redis.Client
}
func NewRedisManager(redis *redis.Client) *RedisManager {
return &RedisManager{
redis: redis,
}
}
func (pm *RedisManager) GetRawValue(ctx context.Context, codigo string) (string, error) {
val, err := pm.redis.Get(ctx, codigo).Result()
if err == redis.Nil {
return "", fmt.Errorf("clave no encontrada en Redis: %s", codigo)
} else if err != nil {
return "", err
}
return val, nil
}
func (pm *RedisManager) UpdateParam(ctx context.Context, codigo string, nuevoValor string, expiration time.Duration) error {
err := pm.redis.Set(ctx, codigo, nuevoValor, expiration).Err()
if err != nil {
return fmt.Errorf("error actualizando parámetro en Redis [%s]: %w", codigo, err)
}
log.Printf("🔄 Parámetro actualizado en Redis: %s = %s", codigo, nuevoValor)
return nil
}

@@ -0,0 +1,37 @@
package config
import (
"context"
"strconv"
"time"
"github.com/tuusuario/go-sync-service/internal/domain/ports"
)
type RedisProvider struct {
ParamManager *RedisManager
Ctx context.Context
}
// UpdateParamInRedis implements ports.RedisConfigProvider.
func (p *RedisProvider) UpdateParam(key string, value string, expiration time.Duration) error {
return p.ParamManager.UpdateParam(p.Ctx, key, value, expiration)
}
func NewRedisProvider(pm *RedisManager, ctx context.Context) ports.RedisConfigProvider {
return &RedisProvider{ParamManager: pm, Ctx: ctx}
}
func (p *RedisProvider) GetString(key string) (string, error) {
return p.ParamManager.GetRawValue(p.Ctx, key)
}
// GetInt64 retrieves an int64 configuration value from Redis, given its key.
// It returns 0 and an error if the key is not found in Redis.
func (p *RedisProvider) GetInt64(key string) (int64, error) {
val, err := p.ParamManager.GetRawValue(p.Ctx, key)
if err != nil {
return 0, err
}
return strconv.ParseInt(val, 10, 64)
}

internal/db/operaciones.go (new file, 379 lines)

@@ -0,0 +1,379 @@
package db
import (
"fmt"
"regexp"
"strconv"
"strings"
"time"
"github.com/tuusuario/go-sync-service/internal/config"
"github.com/tuusuario/go-sync-service/internal/domain/dto"
"github.com/tuusuario/go-sync-service/internal/domain/model"
"gorm.io/gorm"
"gorm.io/gorm/clause"
)
type GormDatabase struct {
db *gorm.DB
}
func NewGormDatabase(db *gorm.DB) *GormDatabase {
return &GormDatabase{db: db}
}
func (g *GormDatabase) SyncRows(persistencia dto.Persistencia, rawData *[]map[string]interface{}, company_db string) error {
logPrefix := fmt.Sprintf("[🧹 Tabla: %s] ", persistencia.Table)
config.Log.Println(logPrefix + " ✅ Inicializacion Syncing data...")
now := time.Now()
batchSize := persistencia.BatchSize
if batchSize <= 0 {
batchSize = 100
}
batch := make([]map[string]interface{}, 0, batchSize)
for i, item := range *rawData {
record := map[string]interface{}{
persistencia.CampoSync: now,
}
// Copy mapped fields (supports "a.b.c", "array[0].field", "array[-1].field", "array[].field" paths)
for column, jsonPath := range persistencia.Fields {
val := getNestedValue(item, jsonPath)
record[column] = val
}
// Static fields
for k, v := range persistencia.StaticFields {
record[k] = v
}
// Force configured columns to string
normalizeTypes(record, persistencia.StringifyFields)
// ========== NEW: build the composite PK if configured ==========
if persistencia.PrimaryKeyName != "" && len(persistencia.PrimaryKeyConcat) > 0 {
if key, ok := buildCompositeKey(record, company_db, persistencia.PrimaryKeyConcat, persistencia.PrimaryKeySeparator); ok {
record[persistencia.PrimaryKeyName] = key
}
}
batch = append(batch, record)
// Process the batch
if len(batch) == batchSize || i == len(*rawData)-1 {
config.Log.Debugf(logPrefix+" Procesando batch de %d registros", len(batch))
if len(persistencia.UpdateBy) > 0 {
// Updates keyed by multiple fields
for _, row := range batch {
var whereParts []string
var whereValues []interface{}
for campoTabla, campoServicio := range persistencia.UpdateBy {
val, ok := row[campoServicio]
if !ok || val == nil {
continue
}
whereParts = append(whereParts, fmt.Sprintf("%s = ?", campoTabla))
whereValues = append(whereValues, val)
}
if len(whereParts) < len(persistencia.UpdateBy) {
config.Log.Warnf("⚠️ Registro incompleto para update (faltan claves): %+v", row)
continue
}
// Copy without the key fields
updateData := make(map[string]interface{})
for k, v := range row {
skip := false
for campoTabla := range persistencia.UpdateBy {
if k == campoTabla {
skip = true
break
}
}
if !skip {
updateData[k] = v
}
}
res := g.db.Table(persistencia.Table).
Where(strings.Join(whereParts, " AND "), whereValues...).
Updates(updateData)
if res.Error != nil {
config.Log.Errorf("%s ❌ Error en update: %v", logPrefix, res.Error)
return res.Error
}
if res.RowsAffected == 0 {
config.Log.Warnf("%s ⚠️ Ninguna fila afectada con campos: %v valores: %v",
logPrefix, strings.Join(whereParts, " AND "), printWhereValues(whereValues))
}
}
} else {
// Inserts with PK conflict handling (UPSERT)
cols := unionColumnList(batch) // union of the batch's columns
// Choose the PK: use the composite (computed) one if present; otherwise use PrimaryKey
pkName := persistencia.PrimaryKey
if persistencia.PrimaryKeyName != "" && len(persistencia.PrimaryKeyConcat) > 0 {
pkName = persistencia.PrimaryKeyName
}
err := g.db.Table(persistencia.Table).
Clauses(clause.OnConflict{
Columns: []clause.Column{{Name: pkName}},
DoUpdates: clause.AssignmentColumns(cols),
}).
Create(&batch).Error
if err != nil {
config.Log.Errorf("%s ❌ Error en batch insert: %v", logPrefix, err)
return err
}
}
batch = batch[:0]
}
}
if persistencia.Eliminacion.Enabled {
err := g.db.Table(persistencia.Table).
Where(persistencia.Eliminacion.Field+" < ?", now).
Delete(nil).Error
if err != nil {
config.Log.Printf(logPrefix+"❌ Error eliminando obsoletos: %+v", err)
return err
}
}
config.Log.Println(logPrefix + " ✅ Finalizacion Syncing data...")
return nil
}
func unionColumnList(batch []map[string]interface{}) []string {
set := map[string]struct{}{}
for _, r := range batch {
for k := range r {
set[k] = struct{}{}
}
}
cols := make([]string, 0, len(set))
for k := range set {
cols = append(cols, k)
}
return cols
}
func (g *GormDatabase) GetCredencialesFromTemplate(whereTemplate string, variables map[string]interface{}) (*model.CredencialesSAP, error) {
var cred model.CredencialesSAP
query := whereTemplate
var args []interface{}
config.Log.Debugf("🔎 Variables recibidas:")
for k, v := range variables {
placeholder := "@" + k
query = strings.ReplaceAll(query, placeholder, "?")
args = append(args, v)
config.Log.Debugf(" %s = %v", k, v)
}
config.Log.Debugf("📝 Consulta final construida:")
config.Log.Debugf(" Query: %s", query)
config.Log.Debugf(" Args: %v", args)
err := g.db.Where(query, args...).First(&cred).Error
return &cred, err
}
func printWhereValues(whereValues []interface{}) string {
if len(whereValues) == 0 {
return ""
}
var b strings.Builder
b.WriteString(fmt.Sprintf("%v", whereValues[0]))
for i := 1; i < len(whereValues); i++ {
b.WriteString(", ")
b.WriteString(fmt.Sprintf("%v", whereValues[i]))
}
return b.String()
}
// ===== Support for [n], [-1] and [] indices in paths =====
var idxRe = regexp.MustCompile(`^([^\[\]]+)(?:\[(\-?\d*)\])?$`)
// toIfaceSlice tries to view the value as a slice of interfaces
func toIfaceSlice(v interface{}) ([]interface{}, bool) {
switch s := v.(type) {
case []interface{}:
return s, true
case []map[string]interface{}:
out := make([]interface{}, len(s))
for i := range s {
out[i] = s[i]
}
return out, true
default:
return nil, false
}
}
// getNestedValue supports paths like "contacts[0].field", "contacts[-1].field", "contacts[].field"
func getNestedValue(data map[string]interface{}, path string) interface{} {
if path == "" {
return nil
}
parts := strings.Split(path, ".")
var current interface{} = data
for _, part := range parts {
// parse name + optional index
m := idxRe.FindStringSubmatch(part)
if len(m) == 0 {
return nil
}
key := m[1]
idxStr := m[2] // "", "0", "1", "-1", ...
// map step (key)
mp, ok := current.(map[string]interface{})
if !ok {
return nil
}
var exists bool
current, exists = mp[key]
if !exists {
return nil
}
// if there is an index (or "[]"), treat it as a slice
if idxStr != "" || strings.HasSuffix(part, "[]") {
arr, ok := toIfaceSlice(current)
if !ok || len(arr) == 0 {
return nil
}
var idx int
if idxStr == "" {
// "[]" -> primero
idx = 0
} else {
n, err := strconv.Atoi(idxStr)
if err != nil {
return nil
}
if n < 0 {
n = len(arr) + n // -1 => last element
}
idx = n
}
if idx < 0 || idx >= len(arr) {
return nil
}
current = arr[idx]
}
}
return current
}
// ===== NEW: composite key helper =====
func buildCompositeKey(row map[string]interface{}, companyDB string, concat []string, sep string) (string, bool) {
if len(concat) == 0 {
return "", false
}
if sep == "" {
sep = ":"
}
parts := make([]string, 0, len(concat))
for _, item := range concat {
if strings.HasPrefix(item, "@") { // tokens especiales
token := strings.TrimPrefix(item, "@")
switch {
case token == "company_db":
parts = append(parts, companyDB)
case strings.HasPrefix(token, "literal="):
parts = append(parts, strings.TrimPrefix(token, "literal="))
default:
// Unknown token -> empty string (or return false to enforce it)
parts = append(parts, "")
}
continue
}
// take it from the row (column already mapped into record)
parts = append(parts, toString(row[item]))
}
return strings.Join(parts, sep), true
}
// ===== Type normalization =====
func normalizeTypes(m map[string]interface{}, stringifyFields []string) {
if len(stringifyFields) == 0 {
return
}
sf := make(map[string]struct{}, len(stringifyFields))
for _, f := range stringifyFields {
sf[f] = struct{}{}
}
for k, v := range m {
if v == nil {
continue
}
if _, ok := sf[k]; ok {
m[k] = toString(v)
}
}
}
// ===== Generic cast to string =====
func toString(v interface{}) string {
switch t := v.(type) {
case string:
return t
case fmt.Stringer:
return t.String()
case float64:
if t == float64(int64(t)) {
return fmt.Sprintf("%d", int64(t))
}
return fmt.Sprintf("%v", t)
case float32:
if t == float32(int64(t)) {
return fmt.Sprintf("%d", int64(t))
}
return fmt.Sprintf("%v", t)
case int:
return fmt.Sprintf("%d", t)
case int8:
return fmt.Sprintf("%d", t)
case int16:
return fmt.Sprintf("%d", t)
case int32:
return fmt.Sprintf("%d", t)
case int64:
return fmt.Sprintf("%d", t)
case uint:
return fmt.Sprintf("%d", t)
case uint8:
return fmt.Sprintf("%d", t)
case uint16:
return fmt.Sprintf("%d", t)
case uint32:
return fmt.Sprintf("%d", t)
case uint64:
return fmt.Sprintf("%d", t)
case bool:
if t {
return "true"
}
return "false"
case time.Time:
return t.Format(time.RFC3339)
default:
return fmt.Sprintf("%v", t)
}
}

@@ -0,0 +1,22 @@
package dto
type CronConfigList struct {
Crons []CronJob `json:"crons"`
}
type CronJob struct {
Nombre string `json:"nombre"`
UnidadNegocio UnidadNegocio `json:"unidad_negocio"`
Configuracion ConfiguracionCron `json:"configuracion"`
}
type UnidadNegocio struct {
CompanyName string `json:"company_name"`
CompanyDB string `json:"company_db"`
}
type ConfiguracionCron struct {
Ejecucion string `json:"ejecucion"` // e.g. "@every 2m"
Proceso []string `json:"proceso"` // list of keys pointing to other configurations
Enabled bool `json:"enabled"`
}

@@ -0,0 +1,69 @@
package dto
type JobConfig struct {
Auth ServiceConfig `json:"auth,omitempty"`
Service ServiceConfig `json:"service,omitempty"`
Persistencia Persistencia `json:"persistencia,omitempty"`
}
type ServiceConfig struct {
GQL bool `json:"gql,omitempty"`
Method string `json:"method"`
Path string `json:"path"`
Headers map[string]string `json:"headers"`
Rest *RestOptions `json:"rest,omitempty"`
GraphQL *GraphQLOptions `json:"graphql,omitempty"`
}
type RestOptions struct {
Body any `json:"body"`
Query map[string]string `json:"query"`
Pagination *RestPagination `json:"pagination,omitempty"`
}
type RestPagination struct {
Enabled bool `json:"enabled"`
Skip int `json:"skip"`
Top int `json:"top"`
}
type GraphQLOptions struct {
Query string `json:"query"`
RootField string `json:"root_field"`
RowField string `json:"row_field"`
Variables map[string]interface{} `json:"variables"`
Pagination *GraphQLPagination `json:"pagination"`
}
type GraphQLPagination struct {
Enabled bool `json:"enabled"`
CursorField string `json:"cursorField"`
HasNextField string `json:"hasNextField"`
LimitField string `json:"limitField"`
CursorParam string `json:"cursorParam"`
}
type Persistencia struct {
Table string `json:"table"`
BatchSize int `json:"batch_size"`
CampoSync string `json:"campo_sync"`
PrimaryKey string `json:"primary_key"`
Fields map[string]string `json:"fields"`
StaticFields map[string]interface{} `json:"static_fields"`
UpdateBy map[string]string `json:"update_by"`
Eliminacion struct {
Field string `json:"field"`
Enabled bool `json:"enabled"`
} `json:"soft_delete"`
StringifyFields []string `json:"stringify_fields"`
// 👇 NEW: precomputed composite key
PrimaryKeyName string `json:"primary_key_name"` // e.g. "pk_redis"
PrimaryKeyConcat []string `json:"primary_key_concat"` // e.g. ["@company_db","card_code"]
PrimaryKeySeparator string `json:"primary_key_separator"` // e.g. ":"
}
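// Illustrative JSON for a Persistencia block (field names follow the JSON tags above;
// the table, columns, and paths below are placeholder values, not part of the original config):
// {
//   "table": "business_partners",
//   "batch_size": 100,
//   "campo_sync": "synced_at",
//   "fields": { "card_code": "CardCode", "email": "contacts[0].E_Mail" },
//   "static_fields": { "origin": "SAP" },
//   "stringify_fields": ["card_code"],
//   "primary_key_name": "pk_redis",
//   "primary_key_concat": ["@company_db", "card_code"],
//   "primary_key_separator": ":",
//   "soft_delete": { "field": "synced_at", "enabled": true }
// }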
type Eliminacion struct {
Field string `json:"field"`
Enabled bool `json:"enabled"`
}

@@ -0,0 +1,9 @@
package dto
import "time"
type SessionData struct {
SessionId string `json:"session_id"`
ExpiresAt time.Time `json:"expires_at"`
EndPoint string `json:"end_point"`
}

@@ -0,0 +1,24 @@
package model
import "time"
type CredencialesSAP struct {
ID int `gorm:"column:id;primaryKey;autoIncrement"`
SerieSAP string `gorm:"column:serie_sap"`
CompanyName string `gorm:"column:company_name"`
BranchID int `gorm:"column:branch_id"`
CompanyDB string `gorm:"column:company_db"`
UserName string `gorm:"column:user_name"`
Password string `gorm:"column:password"` // ⚠️ Consider encrypting if needed
Status string `gorm:"column:status"` // e.g. 'A' active, 'I' inactive
UserCreated string `gorm:"column:user_created"`
DateCreated time.Time `gorm:"column:date_created"`
UserUpdated string `gorm:"column:user_updated"`
DateUpdated time.Time `gorm:"column:date_updated"`
EndPoint string `gorm:"column:end_point"`
}
func (CredencialesSAP) TableName() string {
return "business_units"
}

@@ -0,0 +1,12 @@
package ports
import (
"github.com/tuusuario/go-sync-service/internal/domain/dto"
"github.com/tuusuario/go-sync-service/internal/domain/model"
)
type Database interface {
SyncRows(persistencia dto.Persistencia, data *[]map[string]interface{}, company_db string) error
//GetByTemplate(filterKey string, variables map[string]interface{}) (any, error)
GetCredencialesFromTemplate(whereTemplate string, variables map[string]interface{}) (*model.CredencialesSAP, error)
}

@@ -0,0 +1,11 @@
package ports
import (
"time"
)
type RedisConfigProvider interface {
GetString(key string) (string, error)
GetInt64(key string) (int64, error)
UpdateParam(key string, value string, expiration time.Duration) error
}

@@ -0,0 +1,7 @@
package ports
type Scheduler interface {
Start()
Stop()
Reload() error
}

@@ -0,0 +1,5 @@
package ports
type SessionProvider interface {
GetSession(endpoint, user, password, company string) (string, error)
}

internal/http/client.go (new file, 55 lines)

@@ -0,0 +1,55 @@
package http
import (
"crypto/tls"
"sync"
"time"
"github.com/go-resty/resty/v2"
"github.com/tuusuario/go-sync-service/internal/config"
)
var (
clientInstance *resty.Client
once sync.Once
)
func InitClient() {
// Initialize the client exactly once
once.Do(func() {
client := resty.New().
SetTimeout(time.Duration(config.GlobalConfig.TimeoutSeconds) * time.Second)
client.SetTLSClientConfig(&tls.Config{InsecureSkipVerify: config.GlobalConfig.TLSSkipVerify})
if config.GlobalConfig.LogRequests {
var start time.Time // 👈 request start time (shared by the request/response hooks)
client.OnBeforeRequest(func(c *resty.Client, r *resty.Request) error {
start = time.Now()
config.Log.Printf("➡️ Request: %s %s\nHeaders: %v\nBody: %v\n",
r.Method, r.URL, r.Header, r.Body)
return nil
})
client.OnAfterResponse(func(c *resty.Client, r *resty.Response) error {
duration := time.Since(start)
config.Log.Printf("⬅️ Response: %s %s | Status: %d | Duration: %s\nHeaders: %v\nBody: %s\n",
r.Request.Method, r.Request.URL, r.StatusCode(), duration,
r.Header(), r.String())
return nil
})
}
client.SetDebug(config.GlobalConfig.EnableDebug)
config.Log.Printf("🚀 Inicializando Resty client...")
clientInstance = client
})
}
func GetClient() *resty.Client {
if clientInstance == nil {
panic("❌ Resty client no inicializado. Llama a InitClient primero.")
}
return clientInstance
}

@@ -0,0 +1,66 @@
package http
import (
"errors"
"fmt"
"github.com/go-resty/resty/v2"
"github.com/tuusuario/go-sync-service/internal/config"
"github.com/tuusuario/go-sync-service/internal/domain/dto"
)
func SendRequest(host string, opts dto.ServiceConfig) (*resty.Response, error) {
client := GetClient()
req := client.R()
// Set headers
for k, v := range opts.Headers {
req.SetHeader(k, v)
}
// Build the full URL
if host == "" {
return nil, errors.New("el host no puede estar vacío")
}
if opts.Path == "" {
return nil, errors.New("el path no puede estar vacío")
}
url := fmt.Sprintf("%s%s", host, opts.Path)
req.SetHeader("Content-Type", "application/json")
// GraphQL request
if opts.GraphQL != nil {
payload := map[string]interface{}{
"query": opts.GraphQL.Query,
"variables": opts.GraphQL.Variables,
}
req.SetBody(payload)
return req.Post(url)
}
// REST request
if opts.Rest != nil {
if opts.Rest.Query != nil {
req.SetQueryParams(opts.Rest.Query)
}
if opts.Rest.Body != nil {
req.SetBody(opts.Rest.Body)
config.Log.Println("📦 Body:", opts.Rest.Body)
}
}
// HTTP method
switch opts.Method {
case "GET":
return req.Get(url)
case "POST":
return req.Post(url)
case "PUT":
return req.Put(url)
case "DELETE":
return req.Delete(url)
default:
return nil, fmt.Errorf("método HTTP no soportado: %s", opts.Method)
}
}

internal/http/session.go (new file, 153 lines)

@@ -0,0 +1,153 @@
package http
import (
"encoding/json"
"fmt" // NEW
"sync"
"time"
"github.com/tuusuario/go-sync-service/internal/config"
"github.com/tuusuario/go-sync-service/internal/domain/dto"
"github.com/tuusuario/go-sync-service/internal/domain/model"
"github.com/tuusuario/go-sync-service/internal/domain/ports"
"github.com/tuusuario/go-sync-service/internal/security"
)
var (
redisKey = "session:SAP"
mutex sync.Mutex
)
type SAPSessionProvider struct{}
func GetSession(cfg ports.RedisConfigProvider, job dto.CronJob, auth dto.ServiceConfig, dbcore ports.Database, logPrefix string) (*dto.SessionData, error) {
mutex.Lock()
defer mutex.Unlock()
redisKey = "session:" + job.UnidadNegocio.CompanyName + ":" + job.UnidadNegocio.CompanyDB
if sess, err := loadSessionFromRedis(cfg, logPrefix); err == nil && time.Now().Before(sess.ExpiresAt) {
config.Log.Printf("%v 🔑 Sesión obtenida de Redis %v", logPrefix, redisKey)
return sess, nil
}
parametros := map[string]interface{}{
"company_name": job.UnidadNegocio.CompanyName,
"company_db": job.UnidadNegocio.CompanyDB,
}
credencial, err := dbcore.GetCredencialesFromTemplate(config.GlobalConfig.WhereUnitsBusiness, parametros)
if err != nil {
config.Log.Printf("%v ❌ Error al obtener credenciales: %v", logPrefix, err)
return nil, err
}
// ==============================
// DECRYPT PASSWORD (AES-GCM)
// ==============================
key, err := security.LoadEncryptionKey()
if err != nil {
return nil, fmt.Errorf("%s %v", logPrefix, err)
}
// We assume credencial.Password arrives as base64(nonce||cipher) produced by EncryptAESGCM
plainPass, err := security.DecryptAESGCM(credencial.Password, key)
if err != nil {
config.Log.Printf("%v ❌ Error al descifrar password: %v", logPrefix, err)
return nil, err
}
credencial.Password = plainPass
// Do not log secrets. If you need a log line, leave the password out:
config.Log.Debugf("%v Obteniendo credenciales para CompanyDB=%s UserName=%s (password oculto)",
logPrefix, credencial.CompanyDB, credencial.UserName)
config.Log.Printf("%v 🔑 Realizando login...", logPrefix)
mySession := &dto.SessionData{EndPoint: credencial.EndPoint}
if auth.Rest == nil {
auth.Rest = &dto.RestOptions{}
}
// 3. Prepare the login body (uses the already-decrypted plaintext password)
auth = prepareAuthBody(auth, credencial, logPrefix)
config.Log.Debugf(" %v Url: %v + %v", logPrefix, credencial.EndPoint, auth.Path)
resp, err := SendRequest(credencial.EndPoint, auth)
if err != nil || resp.IsError() {
config.Log.Printf("%v ❌ Error al autenticar: %v", logPrefix, err)
if err == nil {
err = fmt.Errorf("login failed with HTTP status %d", resp.StatusCode())
}
return nil, err
}
if auth.GQL {
var dataGraphql struct {
Token string `json:"token"`
RefreshToken string `json:"refresh_token"`
}
if err := json.Unmarshal(resp.Body(), &dataGraphql); err != nil {
config.Log.Printf("%v ❌ Error al parsear sesión graphql: %v", logPrefix, err)
return nil, err
}
mySession.SessionId = dataGraphql.Token
mySession.ExpiresAt = time.Now().Add(10 * time.Minute)
} else {
var dataRest struct {
SessionId string `json:"SessionId"`
SessionTimeout int `json:"SessionTimeout"`
}
if err := json.Unmarshal(resp.Body(), &dataRest); err != nil {
config.Log.Printf("%v ❌ Error al parsear sesión rest: %v", logPrefix, err)
return nil, err
}
mySession.SessionId = dataRest.SessionId
mySession.ExpiresAt = time.Now().Add(time.Duration(dataRest.SessionTimeout) * time.Minute)
}
config.Log.Printf("%v ✅ Sesión obtenida", logPrefix)
saveSessionToRedis(mySession, cfg, logPrefix)
return mySession, nil
}
func loadSessionFromRedis(cfg ports.RedisConfigProvider, logPrefix string) (*dto.SessionData, error) {
raw, err := cfg.GetString(redisKey)
if err != nil {
config.Log.Printf("%v ⚠️ No se pudo obtener sesión de Redis: %v", logPrefix, err)
return nil, err
}
var sess dto.SessionData
if err := json.Unmarshal([]byte(raw), &sess); err != nil {
config.Log.Printf("%v ⚠️ No se pudo parsear sesión de Redis: %v", logPrefix, err.Error())
return nil, err
}
return &sess, nil
}
func saveSessionToRedis(sess *dto.SessionData, cfg ports.RedisConfigProvider, logPrefix string) {
data, _ := json.Marshal(sess)
ttl := time.Until(sess.ExpiresAt)
err := cfg.UpdateParam(redisKey, string(data), ttl)
if err != nil {
config.Log.Printf("%v ⚠️ No se pudo guardar sesión en Redis: %v", logPrefix, err)
}
}
func prepareAuthBody(auth dto.ServiceConfig, cred *model.CredencialesSAP, logPrefix string) dto.ServiceConfig {
if auth.Rest == nil {
auth.Rest = &dto.RestOptions{}
}
if auth.GQL {
config.Log.Debugf("%v 🧠 Preparando auth para GraphQL", logPrefix)
auth.Rest.Body = map[string]string{
"username": cred.UserName,
"password": cred.Password,
}
} else {
config.Log.Debugf("%v🧠 Preparando auth para REST", logPrefix)
auth.Rest.Body = map[string]string{
"CompanyDB": cred.CompanyDB,
"UserName": cred.UserName,
"Password": cred.Password,
}
}
return auth
}

@@ -0,0 +1,98 @@
/* package scheduler
import (
"context"
"github.com/redis/go-redis/v9"
"github.com/robfig/cron/v3"
"github.com/tuusuario/go-sync-service/internal/config"
"github.com/tuusuario/go-sync-service/internal/domain/dto"
"github.com/tuusuario/go-sync-service/internal/domain/ports"
fetcher "github.com/tuusuario/go-sync-service/internal/sync"
"github.com/tuusuario/go-sync-service/internal/utils"
"gorm.io/gorm"
)
var currentCron *cron.Cron
func ListenCronReload(ctx context.Context, redisClient *redis.Client, cfg ports.RedisConfigProvider, database *gorm.DB) {
pubsub := redisClient.Subscribe(ctx, "cron:reload")
ch := pubsub.Channel()
config.Log.Println("👂 Escuchando cambios de configuración en cron:reload")
for msg := range ch {
if msg.Payload == "reload" {
config.Log.Println("🔄 Recargando trabajos cron desde Redis...")
lista_cron, err := utils.CargarDesdeRedis[dto.CronJobList](cfg, config.CronConfig)
if err != nil {
config.Log.Printf("❌ Error al cargar nueva configuración de crons: %v", err)
continue
}
if currentCron != nil {
currentCron.Stop()
}
newCron := cron.New()
for _, job := range lista_cron.Crons {
if !job.Enabled {
config.Log.WithField("job", job.Nombre).Warn("⚠️ Job desactivado")
continue
}
job := job // evitar captura de variable
_, err := newCron.AddFunc(job.Ejecucion, func() {
config.Log.Printf("🚀 Ejecutando job: %s", job.Nombre)
fetcher.SyncData(cfg, database, job)
})
if err != nil {
config.Log.Printf("❌ Error registrando nuevo cron job %s: %v", job.Nombre, err)
}
}
currentCron = newCron
currentCron.Start()
config.Log.Println("✅ Crons recargados exitosamente.")
}
}
}
func LoadAndStartCrons(cfg ports.RedisConfigProvider, database *gorm.DB) {
lista_cron, err := utils.CargarDesdeRedis[dto.CronJobList](cfg, config.CronConfig)
if err != nil {
config.Log.Errorf("❌ Error al cargar configuración de crons: %v", err)
return
}
newCron := cron.New()
for _, job := range lista_cron.Crons {
if !job.Enabled {
config.Log.WithField("job", job.Nombre).Warn("⚠️ Job desactivado")
continue
}
job := job
config.Log.WithFields(map[string]interface{}{
"job": job.Nombre,
"cron": job.Ejecucion,
}).Info("📝 Registrando cron")
_, err := newCron.AddFunc(job.Ejecucion, func() {
config.Log.Infof("🚀 Ejecutando job: %s", job.Nombre)
fetcher.SyncData(cfg, database, job)
})
if err != nil {
config.Log.WithField("job", job.Nombre).Errorf("❌ Error registrando cron: %v", err)
}
}
newCron.Start()
currentCron = newCron
config.Log.Info("✅ Cron jobs cargados correctamente al iniciar")
}
*/

@@ -0,0 +1,26 @@
package scheduler
import (
"context"
"github.com/redis/go-redis/v9"
"github.com/tuusuario/go-sync-service/internal/config"
"github.com/tuusuario/go-sync-service/internal/domain/ports"
"gorm.io/gorm"
)
func listenCronReload(ctx context.Context, redisClient *redis.Client, cfg ports.RedisConfigProvider, dbConn *gorm.DB) {
pubsub := redisClient.Subscribe(ctx, config.GlobalConfig.RedisSubscribe)
ch := pubsub.Channel()
config.Log.Infof("👂 Escuchando cambios en %v...", config.GlobalConfig.RedisSubscribe)
for msg := range ch {
if msg.Payload == "reload" {
config.Log.Info("🔄 Recargando configuración de cron...")
if err := loadAndStartJobs(ctx, cfg, dbConn); err != nil {
config.Log.Errorf("❌ Error al recargar cron jobs: %v", err)
}
}
}
}

View File

@ -0,0 +1,62 @@
package scheduler
import (
"context"
"github.com/redis/go-redis/v9"
"github.com/robfig/cron/v3"
"github.com/tuusuario/go-sync-service/internal/config"
"github.com/tuusuario/go-sync-service/internal/domain/dto"
"github.com/tuusuario/go-sync-service/internal/domain/ports"
fetcher "github.com/tuusuario/go-sync-service/internal/sync"
"github.com/tuusuario/go-sync-service/internal/utils"
"gorm.io/gorm"
)
var currentCron *cron.Cron
func Start(ctx context.Context, redisClient *redis.Client, cfg ports.RedisConfigProvider, database *gorm.DB) {
config.Log.Info("🚀 Iniciando Scheduler...")
if err := loadAndStartJobs(ctx, cfg, database); err != nil {
config.Log.Errorf("❌ Error inicializando jobs: %v", err)
}
go listenCronReload(ctx, redisClient, cfg, database)
}
func loadAndStartJobs(ctx context.Context, cfg ports.RedisConfigProvider, dbConn *gorm.DB) error {
lista, err := utils.CargarDesdeRedis[dto.CronConfigList](cfg, config.CronConfig)
if err != nil {
return err
}
if currentCron != nil {
currentCron.Stop()
}
newCron := cron.New()
for _, job := range lista.Crons {
// Skip jobs that are not enabled
if !job.Configuracion.Enabled {
config.Log.WithField("job", job.Nombre).Warn("⏸️ Job desactivado")
continue
}
job := job // closure-safe
_, err := newCron.AddFunc(job.Configuracion.Ejecucion, func() {
config.Log.Infof("🚀 Ejecutando job: %s", job.Nombre)
// Run the job's sync
fetcher.SyncData(cfg, dbConn, job)
})
if err != nil {
config.Log.Errorf("❌ Error registrando job %s: %v", job.Nombre, err)
}
}
newCron.Start()
currentCron = newCron
config.Log.Info("✅ Jobs registrados exitosamente")
return nil
}
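
For reference, a standalone sketch of the robfig/cron/v3 pattern the scheduler relies on: the default parser accepts standard five-field specs as well as @every descriptors, and each closure gets its own copy of the loop variable before registration. Job names and the spec below are illustrative.

package main

import (
	"fmt"
	"time"

	"github.com/robfig/cron/v3"
)

func main() {
	jobs := []string{"items", "orders"} // hypothetical job names
	c := cron.New()                     // default parser: 5-field specs and @every descriptors

	for _, name := range jobs {
		name := name // copy so each closure sees its own value (pre-Go 1.22 semantics)
		if _, err := c.AddFunc("@every 5s", func() {
			fmt.Println("running job:", name)
		}); err != nil {
			fmt.Println("bad cron spec:", err)
		}
	}

	c.Start()
	defer c.Stop()
	time.Sleep(12 * time.Second) // let a couple of ticks fire in this demo
}
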

View File

@ -0,0 +1,53 @@
package security
import (
"crypto/aes"
"crypto/cipher"
"encoding/base64"
"errors"
)
func DecryptAESGCM(b64 string, key []byte) (string, error) {
if len(key) != 32 {
return "", errors.New("encryption key must be 32 bytes for AES-256")
}
raw, err := base64.StdEncoding.DecodeString(b64)
if err != nil {
return "", err
}
block, err := aes.NewCipher(key)
if err != nil {
return "", err
}
// Try both 12- and 16-byte nonces
for _, ns := range []int{12, 16} {
gcm, err := cipher.NewGCMWithNonceSize(block, ns)
if err != nil {
continue
} // this nonce size is not supported, try the next one
tagSize := gcm.Overhead()
if len(raw) < ns+tagSize+1 { // at least 1 byte of ciphertext
continue
}
nonce := raw[:ns]
rest := raw[ns:]
// A) nonce || ciphertext || tag (native Go layout)
if pt, err := gcm.Open(nil, nonce, rest, nil); err == nil {
return string(pt), nil
}
// B) nonce || tag || ciphertext (PyCryptodome-style layout)
if len(rest) > tagSize {
tag := rest[:tagSize]
ct := rest[tagSize:]
repacked := append(ct, tag...) // => ciphertext||tag
if pt, err := gcm.Open(nil, nonce, repacked, nil); err == nil {
return string(pt), nil
}
}
}
return "", errors.New("GCM auth failed: wrong key, bad data, or unsupported layout")
}
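
To exercise the decryptor end to end, a hedged sketch of a companion encryptor that emits layout A (nonce || ciphertext || tag) with the default 12-byte GCM nonce, which is what Go's Seal produces natively. This helper is not part of the package above; it is only a testing aid.

package security

import (
	"crypto/aes"
	"crypto/cipher"
	"crypto/rand"
	"encoding/base64"
	"errors"
	"io"
)

// EncryptAESGCM is a companion sketch to DecryptAESGCM: it returns
// base64(nonce || ciphertext || tag) using a random 12-byte nonce.
func EncryptAESGCM(plaintext string, key []byte) (string, error) {
	if len(key) != 32 {
		return "", errors.New("encryption key must be 32 bytes for AES-256")
	}
	block, err := aes.NewCipher(key)
	if err != nil {
		return "", err
	}
	gcm, err := cipher.NewGCM(block)
	if err != nil {
		return "", err
	}
	nonce := make([]byte, gcm.NonceSize())
	if _, err := io.ReadFull(rand.Reader, nonce); err != nil {
		return "", err
	}
	// Seal appends ciphertext||tag after the nonce, giving nonce||ciphertext||tag.
	sealed := gcm.Seal(nonce, nonce, []byte(plaintext), nil)
	return base64.StdEncoding.EncodeToString(sealed), nil
}
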

View File

@ -0,0 +1,52 @@
package security
import (
"encoding/base64"
"encoding/hex"
"errors"
"os"
"strings"
"sync"
)
var (
keyOnce sync.Once
keyBuf []byte
keyErr error
)
// LoadEncryptionKey supports raw (32 chars), base64 (32 bytes), or hex (32 bytes).
func LoadEncryptionKey() ([]byte, error) {
keyOnce.Do(func() {
v := strings.TrimSpace(os.Getenv("ENCRYPTION_KEY"))
if v == "" {
keyErr = errors.New("ENCRYPTION_KEY no definida")
return
}
// 1) raw 32 chars
if len(v) == 32 {
keyBuf = []byte(v)
return
}
// 2) base64 (std / url)
if b, err := base64.StdEncoding.DecodeString(v); err == nil && len(b) == 32 {
keyBuf = b
return
}
if b, err := base64.URLEncoding.DecodeString(v); err == nil && len(b) == 32 {
keyBuf = b
return
}
// 3) hex (64 chars -> 32 bytes)
if b, err := hex.DecodeString(v); err == nil && len(b) == 32 {
keyBuf = b
return
}
keyErr = errors.New("ENCRYPTION_KEY inválida: debe ser 32 chars raw, base64 de 32 bytes o hex de 32 bytes")
})
return keyBuf, keyErr
}
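
A minimal usage sketch combining the two helpers above, assuming the caller lives inside the same module (the internal packages are not importable from outside). The ciphertext value is a placeholder, not real data.

package main

import (
	"fmt"
	"log"

	"github.com/tuusuario/go-sync-service/internal/security"
)

func main() {
	key, err := security.LoadEncryptionKey() // reads ENCRYPTION_KEY from the environment
	if err != nil {
		log.Fatalf("cannot load key: %v", err)
	}
	// Placeholder payload: replace with a real base64(nonce||ciphertext||tag) value.
	plain, err := security.DecryptAESGCM("BASE64_PAYLOAD_HERE", key)
	if err != nil {
		log.Fatalf("decrypt failed: %v", err)
	}
	fmt.Println(plain)
}
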

206
internal/sync/fetcher.go Normal file
View File

@ -0,0 +1,206 @@
package fetcher
import (
"encoding/json"
"fmt"
"strconv"
"time"
"github.com/tuusuario/go-sync-service/internal/config"
"github.com/tuusuario/go-sync-service/internal/db"
"github.com/tuusuario/go-sync-service/internal/domain/dto"
"github.com/tuusuario/go-sync-service/internal/domain/ports"
"github.com/tuusuario/go-sync-service/internal/http"
"github.com/tuusuario/go-sync-service/internal/utils"
"github.com/tuusuario/go-sync-service/metrics"
"gorm.io/gorm"
)
func SyncData(redis ports.RedisConfigProvider, database *gorm.DB, job dto.CronJob) {
start := time.Now()
logPrefix := fmt.Sprintf("[🧩 Job: %s] ", job.Nombre)
config.Log.Printf("%s Iniciando sincronización...", logPrefix)
var dbcore ports.Database = db.NewGormDatabase(database)
var hasError bool
http.InitClient()
if len(job.Configuracion.Proceso) == 0 {
config.Log.Printf(" %s ⚠️ No hay procesos configurados para este job", logPrefix)
goto END
}
for _, proceso := range job.Configuracion.Proceso {
config.Log.Printf(logPrefix+" Iniciando proceso %s", proceso)
jobIndividual, err := utils.CargarDesdeRedis[dto.JobConfig](redis, proceso)
if err != nil {
config.Log.Printf(logPrefix+" ❌ Error al obtener configuración del proceso: %v", err)
hasError = true
continue
}
// Obtain the session
session, err := http.GetSession(redis, job, jobIndividual.Auth, dbcore, logPrefix)
if err != nil {
config.Log.Println(logPrefix + " ❌ No se pudo obtener sesión")
hasError = true
continue
}
if session == nil || session.SessionId == "" {
config.Log.Println(logPrefix + " ❌ Sesión inválida o vacía")
hasError = true
continue
}
// Defensive: the headers map may be nil if the Redis config omits it (assumes map[string]string).
if jobIndividual.Service.Headers == nil {
jobIndividual.Service.Headers = map[string]string{}
}
if jobIndividual.Service.GQL {
jobIndividual.Service.Headers["Authorization"] = "Bearer " + session.SessionId
} else {
jobIndividual.Service.Headers["Cookie"] = "B1SESSION=" + session.SessionId
}
response, err := FetchAllPaginatedManual[map[string]interface{}](session.EndPoint, jobIndividual.Service, logPrefix)
if err != nil {
config.Log.Printf(logPrefix+" ❌ Error al obtener data: %v", err)
hasError = true
continue
}
config.Log.Printf("%s Cantidad de elementos: %v", logPrefix, len(*response))
err = dbcore.SyncRows(jobIndividual.Persistencia, response, job.UnidadNegocio.CompanyDB)
if err != nil {
config.Log.Printf(logPrefix+" ❌ Error al guardar en base de datos: %v", err)
hasError = true
}
}
END:
duration := time.Since(start).Seconds()
jobName := job.Nombre
metrics.CronDuration.WithLabelValues(jobName).Observe(duration)
if hasError {
metrics.CronError.WithLabelValues(jobName).Inc()
metrics.CronLastError.WithLabelValues(jobName).Set(float64(time.Now().Unix()))
} else {
metrics.CronSuccess.WithLabelValues(jobName).Inc()
metrics.CronLastSuccess.WithLabelValues(jobName).Set(float64(time.Now().Unix()))
}
config.Log.Printf("%s ⏱ Duración total: %.2fs", logPrefix, duration)
}
func FetchAllPaginatedManual[T any](host string, service dto.ServiceConfig, logPrefix string) (*[]T, error) {
var all []T
// REST pagination
if service.Rest != nil && service.Rest.Pagination != nil && service.Rest.Pagination.Enabled {
skip := service.Rest.Pagination.Skip
top := service.Rest.Pagination.Top
for {
// Update pagination parameters
service.Rest.Query["$skip"] = strconv.Itoa(skip)
service.Rest.Query["$top"] = strconv.Itoa(top)
resp, err := http.SendRequest(host, service)
if err != nil {
return nil, fmt.Errorf("%s ❌ error en la petición: %w", logPrefix, err)
}
var result struct {
Value []T `json:"value"`
}
if err := json.Unmarshal(resp.Body(), &result); err != nil {
return nil, fmt.Errorf("%s ❌ error parseando respuesta: %w", logPrefix, err)
}
if len(result.Value) == 0 {
config.Log.Printf(" %s ❌ No hay más elementos", logPrefix)
break
}
all = append(all, result.Value...)
skip += top
}
return &all, nil
}
// GraphQL pagination
if service.GraphQL != nil && service.GraphQL.Pagination != nil && service.GraphQL.Pagination.Enabled {
for {
resp, err := http.SendRequest(host, service)
if err != nil {
return nil, fmt.Errorf("%s ❌ error en la petición: %w", logPrefix, err)
}
var raw map[string]any
if err := json.Unmarshal(resp.Body(), &raw); err != nil {
return nil, fmt.Errorf("%s ❌ error parseando respuesta GraphQL: %w", logPrefix, err)
}
data, ok := raw["data"].(map[string]interface{})
if !ok {
return nil, fmt.Errorf("%s ❌ no se encontró 'data' en la respuesta GraphQL", logPrefix)
}
root, ok := data[service.GraphQL.RootField].(map[string]interface{})
if !ok {
return nil, fmt.Errorf("%s ❌ no se encontró '%v' en la respuesta GraphQL", logPrefix, service.GraphQL.RootField)
}
// Extract and parse the rows
rows, ok := root[service.GraphQL.RowField].([]interface{})
if !ok {
return nil, fmt.Errorf("%s ❌ no se encontró '%v' en la respuesta GraphQL", logPrefix, service.GraphQL.RowField)
}
for _, r := range rows {
jsonRow, _ := json.Marshal(r)
var item T
if err := json.Unmarshal(jsonRow, &item); err != nil {
config.Log.Printf("%s ⚠️ error parseando fila: %v", logPrefix, err)
continue
}
all = append(all, item)
}
// Check pagination metadata
meta, ok := root["meta"].(map[string]interface{})
if !ok {
return nil, fmt.Errorf("%s ❌ no se encontró 'meta' para paginación", logPrefix)
}
hasNext, ok := meta["hasNextPage"].(bool)
if !ok || !hasNext {
break
}
// Advance the cursor (next page)
if nextPage, ok := meta["next"]; ok {
service.GraphQL.Variables[service.GraphQL.Pagination.CursorParam] = nextPage
} else {
break // no "next" field, stop paginating
}
}
return &all, nil
}
// No pagination
resp, err := http.SendRequest(host, service)
if err != nil {
return nil, fmt.Errorf("%s ❌ error en la petición: %w", logPrefix, err)
}
var result struct {
Value []T `json:"value"`
}
if err := json.Unmarshal(resp.Body(), &result); err != nil {
return nil, fmt.Errorf("%s ❌ error parseando respuesta final: %w", logPrefix, err)
}
all = append(all, result.Value...)
return &all, nil
}
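
As a reading aid, a hedged sketch of the GraphQL response layout FetchAllPaginatedManual walks when pagination is enabled. Here "items" and "rows" stand in for the configured RootField and RowField; only data, meta, hasNextPage and next come from the code above, everything else is illustrative.

package main

import (
	"encoding/json"
	"fmt"
)

// Sample payload shaped the way the paginator expects it.
const sample = `{
  "data": {
    "items": {
      "rows": [ {"id": 1}, {"id": 2} ],
      "meta": { "hasNextPage": true, "next": "cursor-2" }
    }
  }
}`

func main() {
	var raw map[string]any
	if err := json.Unmarshal([]byte(sample), &raw); err != nil {
		panic(err)
	}
	// Same traversal as the fetcher: data -> rootField -> rowField / meta.
	data := raw["data"].(map[string]any)
	root := data["items"].(map[string]any)
	rows := root["rows"].([]any)
	meta := root["meta"].(map[string]any)
	fmt.Println(len(rows), meta["hasNextPage"], meta["next"]) // 2 true cursor-2
}
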

View File

@ -0,0 +1,29 @@
package utils
import (
"encoding/json"
"fmt"
"github.com/tuusuario/go-sync-service/internal/config"
"github.com/tuusuario/go-sync-service/internal/domain/ports"
)
// Generic helper that deserializes a JSON value stored in Redis into any type
func CargarDesdeRedis[T any](cfg ports.RedisConfigProvider, clave string) (*T, error) {
data, err := cfg.GetString(clave)
if err != nil {
config.Log.Errorf("⚠️ error al obtener config de Redis [%s]: %s", clave, err)
return nil, fmt.Errorf("error al obtener clave [%s] de redis: %w", clave, err)
}
config.Log.Debugf("🔑 Clave [%s] obtenida de Redis: %s", clave, data)
var result T
if err := json.Unmarshal([]byte(data), &result); err != nil {
config.Log.Errorf("❌ error al parsear JSON [%s]: %s", clave, err)
return nil, fmt.Errorf("error al parsear JSON [%s]: %w", clave, err)
}
return &result, nil
}
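
A small usage sketch with an ad-hoc type, assuming the Redis key holds a JSON document matching it; the key name, struct, and fields below are illustrative, not part of the service's real configuration.

package example

import (
	"github.com/tuusuario/go-sync-service/internal/config"
	"github.com/tuusuario/go-sync-service/internal/domain/ports"
	"github.com/tuusuario/go-sync-service/internal/utils"
)

// WebhookConfig is a hypothetical type used only to show the generic call.
type WebhookConfig struct {
	URL     string `json:"url"`
	Retries int    `json:"retries"`
}

func loadWebhook(cfg ports.RedisConfigProvider) (*WebhookConfig, error) {
	hook, err := utils.CargarDesdeRedis[WebhookConfig](cfg, "config:webhook")
	if err != nil {
		config.Log.Errorf("no webhook config: %v", err)
		return nil, err
	}
	config.Log.Infof("webhook target %s (retries=%d)", hook.URL, hook.Retries)
	return hook, nil
}
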

1748
logs/syncronizador.log Normal file

File diff suppressed because one or more lines are too long

56
metrics/metrics.go Normal file
View File

@ -0,0 +1,56 @@
// metrics/metrics.go
package metrics
import "github.com/prometheus/client_golang/prometheus"
var (
// How many times each job completed successfully
CronSuccess = prometheus.NewCounterVec(
prometheus.CounterOpts{
Name: "cron_job_success_total",
Help: "Total de ejecuciones exitosas por cron job",
},
[]string{"job"},
)
// How many times it failed
CronError = prometheus.NewCounterVec(
prometheus.CounterOpts{
Name: "cron_job_error_total",
Help: "Total de errores por cron job",
},
[]string{"job"},
)
// Execution time of each job
CronDuration = prometheus.NewHistogramVec(
prometheus.HistogramOpts{
Name: "cron_job_duration_seconds",
Help: "Duración de ejecución por cron job",
Buckets: prometheus.DefBuckets,
},
[]string{"job"},
)
// Last successful run (Unix timestamp)
CronLastSuccess = prometheus.NewGaugeVec(
prometheus.GaugeOpts{
Name: "cron_job_last_success_timestamp",
Help: "Timestamp de la última ejecución exitosa por cron job",
},
[]string{"job"},
)
// Last error (Unix timestamp)
CronLastError = prometheus.NewGaugeVec(
prometheus.GaugeOpts{
Name: "cron_job_last_error_timestamp",
Help: "Timestamp del último error por cron job",
},
[]string{"job"},
)
)
func Register() {
prometheus.MustRegister(CronSuccess, CronError, CronDuration, CronLastSuccess, CronLastError)
}
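
These collectors only become visible once Register is called and the registry is served over HTTP. A minimal sketch of how a main package might expose them with promhttp; the listen port is illustrative.

package main

import (
	"net/http"

	"github.com/prometheus/client_golang/prometheus/promhttp"
	"github.com/tuusuario/go-sync-service/metrics"
)

func main() {
	metrics.Register() // register the cron counters, histogram and gauges defined above

	// Serve the default registry for Prometheus to scrape.
	http.Handle("/metrics", promhttp.Handler())
	if err := http.ListenAndServe(":2112", nil); err != nil {
		panic(err)
	}
}
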