Compare commits

...

5 Commits

Author SHA1 Message Date
37e17504b2 minor 2024-01-17 12:19:48 -05:00
2bceb9c033 minor 2024-01-14 20:10:16 -05:00
82a06046b0 minor comment removal 2024-01-01 21:26:11 -05:00
62a412b465 removal of tags 2023-12-27 11:10:58 -05:00
6241b57172 big oof 2023-12-27 00:12:44 -05:00
2 changed files with 35 additions and 18 deletions

View File

@@ -1,3 +1,3 @@
# telegram_groupmirror ## telegram_groupmirror
Mirrors one or more telegram groups or channels to 1 or more groups or channels. Mirrors one or more telegram groups or channels to 1 or more groups or channels.

49
bot.py
View File

@@ -6,7 +6,7 @@ from telethon import TelegramClient, events
from telethon.tl.types import InputChannel from telethon.tl.types import InputChannel
from telethon.errors import FloodWaitError from telethon.errors import FloodWaitError
import asyncio, json, time, sys, yaml, os, time import asyncio, json, time, sys, yaml, os, time, re
from apscheduler.schedulers.asyncio import AsyncIOScheduler from apscheduler.schedulers.asyncio import AsyncIOScheduler
from datetime import ( from datetime import (
datetime, datetime,
@@ -20,6 +20,14 @@ job_defaults = {
'max_instances': 1 'max_instances': 1
} }
# Regexp patterns to remove from group
patterns = [
'https:\/\/t.me\/\w*',
'@[Ww]hite[Aa]ction'
]
combined_patterns = r'|'.join(map(r'(?:{})'.format, patterns))
# Start Scheduler # Start Scheduler
scheduler = AsyncIOScheduler(job_defaults=job_defaults) scheduler = AsyncIOScheduler(job_defaults=job_defaults)
@@ -56,7 +64,7 @@ async def _indexer(client, config):
start_time = time.time() start_time = time.time()
for input_channel in input_channels_entities: for input_channel in input_channels_entities:
async for message in client.iter_messages(input_channel, limit=None, wait_time=10): async for message in client.iter_messages(input_channel, limit=None, reverse=True, wait_time=10):
try: try:
for output_channel in output_channel_entities: for output_channel in output_channel_entities:
# Process attachments # Process attachments
@@ -65,29 +73,38 @@ async def _indexer(client, config):
path = await client.download_media(message.media, file='usermedia/', progress_callback=callback) path = await client.download_media(message.media, file='usermedia/', progress_callback=callback)
if path is not None: if path is not None:
logging.info(f'Sending file to {output_channel}.') if message.text:
await client.send_file( logging.info(f'Sending file to {output_channel}.')
output_channel, txt = re.sub(combined_patterns, '', message.text)
path,
caption=message.text await client.send_file(
) output_channel,
path,
caption=txt
)
else:
await client.send_file(
output_channel,
path
)
# Remove file after upload # Remove file after upload
os.remove(path) os.remove(path)
else: elif message.text:
if message.text: logging.info(f'Sending message to {output_channel}..')
logging.info(f'Sending message to {output_channel}..') txt = re.sub(combined_patterns, '', message.text)
await client.send_message( await client.send_message(
output_channel, output_channel,
message.text txt
) )
elif message.text: elif message.text:
logging.info(f'Sending message to {output_channel}.') logging.info(f'Sending message to {output_channel}.')
txt = re.sub(combined_patterns, '', message.text)
await client.send_message( await client.send_message(
output_channel, output_channel,
message.text txt
) )
except FloodWaitError as e: except FloodWaitError as e:
logging.error(f'Flood wait for {e.seconds} for _indexer.') logging.error(f'Flood wait for {e.seconds} for _indexer.')