
[IMP] override fetch_mail: if last_internal_date is defined, fetch all messages from that date to today and call message_process (imap server type only)
pull/932/head
archetipo authored 10 years ago, committed by Jordi Ballester
commit 7d23c68f66
fetchmail_bydate/model/fetchmail.py (189 lines changed)
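The diff below rewrites _fetch_from_data_imap around a plain IMAP SINCE search on the stored last_internal_date, followed by a per-message INTERNALDATE check (SINCE only has day precision), and then delegates everything else to the standard fetch_mail. As a rough orientation, here is a minimal standalone sketch of that search-by-date pattern using imaplib directly; the mailbox, credentials and cut-off date are placeholders, not values from the module, which instead reuses the fetchmail.server record's connect() and stored configuration:

    import imaplib
    import time
    from datetime import datetime

    # Hypothetical connection details (placeholders).
    imap = imaplib.IMAP4_SSL('imap.example.com')
    imap.login('user@example.com', 'secret')
    imap.select()

    # The module stores last_internal_date as '%Y-%m-%d %H:%M:%S';
    # the value here is only illustrative.
    last_internal_date = datetime.strptime(
        '2015-01-01 00:00:00', '%Y-%m-%d %H:%M:%S')

    # SINCE is day-granular, so first narrow the candidate set...
    status, data = imap.search(
        None, 'SINCE', last_internal_date.strftime('%d-%b-%Y'))

    # ...then keep only messages whose INTERNALDATE is really newer.
    newer = []
    for num in data[0].split():
        status, resp = imap.fetch(num, 'INTERNALDATE')
        internaldate = imaplib.Internaldate2tuple(resp[0])
        msg_date = datetime.fromtimestamp(time.mktime(internaldate))
        if msg_date > last_internal_date:
            newer.append(num)

    # Each entry in `newer` would then be fetched as (RFC822) and handed
    # to mail.thread.message_process(), as the code in the diff does.
    imap.logout()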

@@ -22,13 +22,10 @@
 # along with this program. If not, see <http://www.gnu.org/licenses/>.
 ##############################################################################
 from openerp.osv import fields, orm
-from openerp.tools import (
-    DEFAULT_SERVER_DATETIME_FORMAT as DSDTF)
 import logging
 import imaplib
 from datetime import datetime
 import time
-import calendar
 _logger = logging.getLogger(__name__)
@@ -51,14 +48,12 @@ class FetchmailServer(orm.Model):
         date_uids = {}
         last_date = False
         last_internal_date = datetime.strptime(
-            server.last_internal_date, DSDTF)
-        timestamp1 = calendar.timegm(
-            last_internal_date.timetuple())
-        intDate = imaplib.Time2Internaldate(timestamp1)
+            server.last_internal_date, "%Y-%m-%d %H:%M:%S")
+        #~ timestamp1 = time.mktime(last_internal_date.timetuple())
+        #~ intDate = imaplib.Time2Internaldate(timestamp1)
         search_status, uids = imap_server.search(
             None,
-            'SINCE',
-            '%s' % intDate
+            'SINCE', '%s' % last_internal_date.strftime('%d-%b-%Y')
         )
         new_uids = uids[0].split()
         for new_uid in new_uids:
@@ -76,93 +71,15 @@ class FetchmailServer(orm.Model):
         for num in messages:
             # SEARCH command *always* returns at least the most
             # recent message, even if it has already been synced
             res_id = None
             result, data = imap_server.uid('fetch', num,
                                            '(RFC822)')
             imap_server.store(num, '-FLAGS', '\\Seen')
-            try:
-                res_id = mail_thread.message_process(
-                    cr, uid,
-                    server.object_id.model,
-                    data[0][1],
-                    save_original=server.original,
-                    strip_attachments=(not server.attach),
-                    context=context)
-            except Exception:
-                _logger.exception(
-                    'Failed to process mail \
-                    from %s server %s.',
-                    server.type,
-                    server.name)
-                failed += 1
-            if res_id and server.action_id:
-                action_pool.run(cr, uid, [server.action_id.id],
-                                {'active_id': res_id,
-                                 'active_ids': [res_id],
-                                 'active_model': context.get(
-                                     "thread_model",
-                                     server.object_id.model)}
-                                )
-            imap_server.store(num, '+FLAGS', '\\Seen')
-            cr.commit()
-            count += 1
-            last_date = not failed and date_uids[num] or False
-        return count, failed, last_date
-    def _fetch_unread_imap(self, cr, uid,
-                           server, imap_server,
-                           mail_thread, action_pool,
-                           count, failed,
-                           context=None):
-        result, data = imap_server.search(None, '(UNSEEN)')
-        for num in data[0].split():
-            res_id = None
-            result, data = imap_server.fetch(num, '(RFC822)')
-            imap_server.store(num, '-FLAGS', '\\Seen')
-            try:
-                res_id = mail_thread.message_process(
-                    cr, uid, server.object_id.model,
-                    data[0][1],
-                    save_original=server.original,
-                    strip_attachments=(not server.attach),
-                    context=context)
-            except Exception:
-                _logger.exception(
-                    'Failed to process mail \
-                    from %s server %s.',
-                    server.type,
-                    server.name)
-                failed += 1
-            if res_id and server.action_id:
-                action_pool.run(cr, uid,
-                                [server.action_id.id],
-                                {'active_id': res_id,
-                                 'active_ids': [res_id],
-                                 'active_model': context.get(
-                                     "thread_model",
-                                     server.object_id.model)}
-                                )
-            imap_server.store(num, '+FLAGS', '\\Seen')
-            cr.commit()
-            count += 1
-        return count, failed
-    def _fetch_unread_pop(self, cr, uid,
-                          server, mail_thread,
-                          failed, action_pool,
-                          context=None):
-        try:
-            pop_server = server.connect()
-            (numMsgs, totalSize) = pop_server.stat()
-            pop_server.list()
-            for num in range(1, numMsgs + 1):
-                (header, msges, octets) = pop_server.retr(num)
-                msg = '\n'.join(msges)
-                res_id = None
+            if data and data[0]:
                 try:
                     res_id = mail_thread.message_process(
-                        cr, uid, server.object_id.model,
-                        msg,
+                        cr, uid,
+                        server.object_id.model,
+                        data[0][1],
                         save_original=server.original,
                         strip_attachments=(not server.attach),
                         context=context)
@@ -174,32 +91,20 @@ class FetchmailServer(orm.Model):
                         server.name)
                     failed += 1
                 if res_id and server.action_id:
-                    action_pool.run(cr, uid, [server.action_id.id],
-                                    {'active_id': res_id,
-                                     'active_ids': [res_id],
-                                     'active_model': context.get(
-                                         "thread_model",
-                                         server.object_id.model)}
-                                    )
-                pop_server.dele(num)
-                cr.commit()
-            _logger.info(
-                "Fetched %d email(s) on %s server %s; \
-                %d succeeded, %d failed.",
-                numMsgs,
-                server.type,
-                server.name,
-                (numMsgs - failed),
-                failed)
-        except Exception:
-            _logger.exception(
-                "General failure when trying to fetch \
-                mail from %s server %s.",
-                server.type,
-                server.name)
-        finally:
-            if pop_server:
-                pop_server.quit()
+                    action_pool.run(
+                        cr, uid, [server.action_id.id],
+                        {
+                            'active_id': res_id,
+                            'active_ids': [res_id],
+                            'active_model': context.get(
+                                "thread_model",
+                                server.object_id.model)
+                        }, context=context)
+                imap_server.store(num, '+FLAGS', '\\Seen')
+                cr.commit()
+                count += 1
+            last_date = not failed and date_uids[num] or False
+        return count, failed, last_date
     def fetch_mail(self, cr, uid, ids, context=None):
         """WARNING: meant for cron usage only -
@@ -211,8 +116,9 @@ class FetchmailServer(orm.Model):
         mail_thread = self.pool.get('mail.thread')
         action_pool = self.pool.get('ir.actions.server')
         for server in self.browse(cr, uid, ids, context=context):
-            _logger.info('start checking for new emails on %s server %s',
-                         server.type, server.name)
+            _logger.info(
+                'start checking for new emails by date on %s server %s',
+                server.type, server.name)
             context.update({'fetchmail_server_id': server.id,
                             'server_type': server.type})
             count, failed = 0, 0
@@ -224,33 +130,9 @@ class FetchmailServer(orm.Model):
                     imap_server.select()
                     if server.last_internal_date:
                         count, failed, last_date = self._fetch_from_data_imap(
-                            cr,
-                            uid,
-                            server,
-                            imap_server,
-                            mail_thread,
-                            action_pool,
-                            count,
-                            failed,
-                            context=context)
-                    count, failed = self._fetch_unread_imap(
-                        cr,
-                        uid,
-                        server,
-                        imap_server,
-                        mail_thread,
-                        action_pool,
-                        count,
-                        failed,
-                        context=context)
-                    _logger.info(
-                        "Fetched %d email(s) on %s server %s; \
-                        %d succeeded, %d failed.",
-                        count,
-                        server.type,
-                        server.name,
-                        (count - failed),
-                        failed)
+                            cr, uid, server, imap_server, mail_thread,
+                            action_pool, count, failed, context=context
+                        )
                 except Exception:
                     _logger.exception(
                         "General failure when trying to fetch mail \
@@ -262,15 +144,8 @@
                     if imap_server:
                         imap_server.close()
                         imap_server.logout()
-            elif server.type == 'pop':
-                self._fetch_unread_pop(cr, uid,
-                                       server, mail_thread,
-                                       failed, action_pool,
-                                       context=context)
-            vals = {'date':
-                    time.strftime(DSDTF)
-                    }
             if last_date:
-                vals['last_internal_date'] = last_date
-            server.write(vals)
-        return True
+                vals = {'last_internal_date': last_date}
+                server.write(vals)
+        return super(FetchmailServer, self).fetch_mail(
+            cr, uid, ids, context=context)