mirror of
https://github.com/GRFreire/nthmail.git
synced 2026-01-08 12:29:38 +00:00
parse mail subject when receiving and saving to db
this way, when the inbox route is called, there is no need to parse all the mails, nor to fetch them from the db, just to obtain their subjects
This commit is contained in:
parent
b01e6776dc
commit
a88e5e90dc
@ -41,5 +41,4 @@ Available env variables:
|
||||
- Restart when either mail or web server dies
|
||||
- Handle attachments
|
||||
- Do not store the raw mail data in the DB, maybe use block storage (the provider can be a disk provider at first)
|
||||
- Cache the subject parsed from each email, so that listing an inbox does not require retrieving and parsing every mail.
|
||||
- Cache in general?
|
||||
|
||||
@ -5,5 +5,6 @@ CREATE TABLE mails (
|
||||
arrived_at integer not null,
|
||||
rcpt_addr text not null,
|
||||
from_addr text not null,
|
||||
subject text,
|
||||
data blob not null
|
||||
);
|
||||
|
||||
@ -10,6 +10,7 @@ import (
|
||||
"strconv"
|
||||
"time"
|
||||
|
||||
"github.com/GRFreire/nthmail/pkg/mail_utils"
|
||||
"github.com/emersion/go-smtp"
|
||||
_ "github.com/mattn/go-sqlite3"
|
||||
)
|
||||
@ -58,13 +59,19 @@ func (session *Session) Data(reader io.Reader) error {
|
||||
return err
|
||||
} else {
|
||||
|
||||
stmt, err := session.tx.Prepare("INSERT INTO mails (arrived_at, rcpt_addr, from_addr, data) VALUES (?, ?, ?, ?)")
|
||||
stmt, err := session.tx.Prepare("INSERT INTO mails (arrived_at, rcpt_addr, from_addr, subject, data) VALUES (?, ?, ?, ?, ?)")
|
||||
if err != nil {
|
||||
println(err)
|
||||
return err
|
||||
}
|
||||
defer stmt.Close()
|
||||
|
||||
_, err = stmt.Exec(session.arrived_at, session.rcpt, session.from, bytes)
|
||||
mail_obj, err := mail_utils.Parse_mail(bytes, true)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
_, err = stmt.Exec(session.arrived_at, session.rcpt, session.from, mail_obj.Subject, bytes)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
@ -59,6 +59,12 @@ type ServerResouces struct {
|
||||
domain string
|
||||
}
|
||||
|
||||
type db_mail_header struct {
|
||||
Id int
|
||||
Arrived_at int64
|
||||
Rcpt_addr, From_addr string
|
||||
Subject string
|
||||
}
|
||||
type db_mail struct {
|
||||
Id int
|
||||
Arrived_at int64
|
||||
@ -105,7 +111,7 @@ func (sr ServerResouces) handleInbox(res http.ResponseWriter, req *http.Request)
|
||||
}
|
||||
defer tx.Commit()
|
||||
|
||||
stmt, err := tx.Prepare("SELECT mails.id, mails.arrived_at, mails.rcpt_addr, mails.from_addr, mails.data FROM mails WHERE mails.rcpt_addr = ?")
|
||||
stmt, err := tx.Prepare("SELECT mails.id, mails.arrived_at, mails.rcpt_addr, mails.from_addr, mails.subject FROM mails WHERE mails.rcpt_addr = ?")
|
||||
if err != nil {
|
||||
res.WriteHeader(500)
|
||||
res.Write([]byte("internal server error"))
|
||||
@ -127,8 +133,8 @@ func (sr ServerResouces) handleInbox(res http.ResponseWriter, req *http.Request)
|
||||
|
||||
var mails []mail_utils.Mail_obj
|
||||
for rows.Next() {
|
||||
var m db_mail
|
||||
err = rows.Scan(&m.Id, &m.Arrived_at, &m.Rcpt_addr, &m.From_addr, &m.Data)
|
||||
var m db_mail_header
|
||||
err = rows.Scan(&m.Id, &m.Arrived_at, &m.Rcpt_addr, &m.From_addr, &m.Subject)
|
||||
if err != nil {
|
||||
res.WriteHeader(500)
|
||||
res.Write([]byte("internal server error"))
|
||||
@ -137,17 +143,12 @@ func (sr ServerResouces) handleInbox(res http.ResponseWriter, req *http.Request)
|
||||
return
|
||||
}
|
||||
|
||||
mail_obj, err := mail_utils.Parse_mail(m.Data, true)
|
||||
mail_obj.Date = time.Unix(m.Arrived_at, 0)
|
||||
var mail_obj mail_utils.Mail_obj
|
||||
mail_obj.Id = m.Id
|
||||
if err != nil {
|
||||
res.WriteHeader(500)
|
||||
res.Write([]byte("internal server error"))
|
||||
|
||||
log.Println("could not parse mail")
|
||||
log.Println(err)
|
||||
return
|
||||
}
|
||||
mail_obj.Date = time.Unix(m.Arrived_at, 0)
|
||||
mail_obj.To = m.Rcpt_addr
|
||||
mail_obj.From = m.From_addr
|
||||
mail_obj.Subject = m.Subject
|
||||
|
||||
mails = append(mails, mail_obj)
|
||||
}
|
||||
|
||||
Loading…
Reference in New Issue
Block a user