Fix DB && Logs to Stdout in Docker

Magnus Bender 2024-10-08 14:11:57 +02:00
parent afc35be35a
commit e376956def
Signed by: bender
GPG Key ID: 5149A211831F2BD7
2 changed files with 15 additions and 9 deletions

@@ -29,6 +29,11 @@ autostart=true
 autorestart=true
 priority=10
+stderr_logfile=/dev/stderr
+stderr_logfile_maxbytes=0
+stdout_logfile=/dev/stdout
+stdout_logfile_maxbytes=0
 [program:nginx]
 command=/usr/sbin/nginx -g 'daemon off;'
 autostart=true
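
For reference, a supervised program's output only reaches docker logs when both streams point at the container's stdout/stderr and rotation is disabled, because supervisord cannot rotate special files such as /dev/stdout. Below is a minimal sketch of a complete program section; the program name app and its command are placeholders, not taken from this repository:

[program:app]
; placeholder program name and command, not from this repository
command=/usr/local/bin/app
autostart=true
autorestart=true
priority=10
; forward both streams to the container so they appear in docker logs
stdout_logfile=/dev/stdout
stderr_logfile=/dev/stderr
; 0 disables rotation, which is required for special files like /dev/stdout
stdout_logfile_maxbytes=0
stderr_logfile_maxbytes=0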

@@ -62,16 +62,16 @@ class DB():
         self.db = sqlite3.connect(
             os.path.join(PERSIST_PATH, 'messages.db'),
             check_same_thread=False
-            )
+        )
         self.db.row_factory = sqlite3.Row
-        self.dblock = Lock()
+        atexit.register(lambda db : db.close(), self.db)
+        self.db_lock = Lock()
         self._assure_tables()

     def _assure_tables(self):
-        self.dblock.acquire()
+        self.db_lock.acquire()
         with self.db:
             self.db.execute("""CREATE TABLE IF NOT EXISTS Messages (
                 count INTEGER PRIMARY KEY AUTOINCREMENT,
@@ -82,11 +82,11 @@ class DB():
                 json BLOB,
                 processed BOOL DEFAULT FALSE
             )""")
-        self.dblock.release()
+        self.db_lock.release()

     @validate_call
     def add_message(self, sender:str, recipient:str, message:AgentMessage, processed:bool=False) -> int:
-        self.dblock.acquire()
+        self.db_lock.acquire()
         with self.db:
             self.db.execute(
                 """INSERT INTO Messages (
@@ -101,20 +101,21 @@ class DB():
                 "processed" : processed
             })
             new_count = self.db.execute("SELECT LAST_INSERT_ROWID() as last").fetchone()
-        self.dblock.release()
+        self.db_lock.release()
         return new_count['last']

     @validate_call
     def set_processed(self, count:int, processed:bool=True) -> bool:
-        self.dblock.acquire()
+        self.db_lock.acquire()
         with self.db:
             try:
                 self.db.execute("UPDATE Messages SET processed = ? WHERE count = ?", (processed, count))
                 return True
             except:
                 return False
-        self.dblock.release()
+            finally:
+                self.db_lock.release()

     def __iter__(self) -> Generator[RowObject, None, None]:
         yield from self.iterate()
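
The set_processed() change is the real deadlock fix: before this commit the release call sat after the with block, behind an unconditional return True / return False, so it never ran and the lock stayed held for every later database call. Moving the release into a finally clause guarantees it always runs. Below is a minimal, self-contained sketch of an equivalent pattern that uses the Lock as a context manager; the names (db, db_lock, Messages) mirror the diff above, but this is an illustration, not the repository's code:

# Sketch only: the lock is released automatically, even when the UPDATE fails
# or the function returns early, because the Lock is used as a context manager.
import atexit
import sqlite3
from threading import Lock

db = sqlite3.connect(":memory:", check_same_thread=False)
db_lock = Lock()
atexit.register(db.close)   # close the connection on interpreter exit, as in the commit

db.execute("CREATE TABLE IF NOT EXISTS Messages (count INTEGER PRIMARY KEY, processed BOOL)")

def set_processed(count: int, processed: bool = True) -> bool:
    with db_lock:           # equivalent to acquire() plus release() in a finally block
        with db:            # commit on success, roll back on exception
            try:
                db.execute("UPDATE Messages SET processed = ? WHERE count = ?",
                           (processed, count))
                return True
            except sqlite3.Error:
                return False

The atexit.register(...) call added in the first hunk serves the same kind of cleanup purpose for the connection itself: the SQLite handle is now closed explicitly when the interpreter shuts down rather than being left open until the process exits.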