diff --git a/.gitignore b/.gitignore
index b651b32..86facd3 100644
--- a/.gitignore
+++ b/.gitignore
@@ -10,3 +10,4 @@ data*
*.pickle
authorized_chats.txt
log.txt
+accounts/*
\ No newline at end of file
diff --git a/Dockerfile b/Dockerfile
index 0b26e2d..7cc40ae 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -1,16 +1,20 @@
-FROM ubuntu:18.04
+FROM python:3-alpine
+
+WORKDIR /bot
+RUN chmod 777 /bot
+
+# install ca-certificates so that HTTPS works consistently
+RUN apk add --no-cache --update ca-certificates aria2 libmagic
+
+RUN apk add --no-cache --update --virtual .build-deps \
+ build-base \
+ libffi-dev \
+ openssl-dev
-WORKDIR /usr/src/app
-RUN chmod 777 /usr/src/app
-RUN apt -qq update
-RUN apt -qq install -y aria2 python3 python3-pip locales
COPY requirements.txt .
RUN pip3 install --no-cache-dir -r requirements.txt
+RUN apk del .build-deps
COPY . .
-RUN chmod +x aria.sh
-RUN locale-gen en_US.UTF-8
-ENV LANG en_US.UTF-8
-ENV LANGUAGE en_US:en
-ENV LC_ALL en_US.UTF-8
+RUN chmod +x *.sh
-CMD ["bash","start.sh"]
+CMD ["sh", "./start.sh"]
diff --git a/README.md b/README.md
index d92f9c4..5e1681e 100644
--- a/README.md
+++ b/README.md
@@ -84,3 +84,34 @@ sudo docker build . -t mirror-bot
```
sudo docker run mirror-bot
```
+
+## Using service accounts for uploading to avoid user rate limit
+
+Many thanks to [AutoRClone](https://github.com/xyou365/AutoRclone) for the scripts
+### Generating service accounts
+Step 1. Generate service accounts [What is service account](https://cloud.google.com/iam/docs/service-accounts) [How to use service account in rclone](https://rclone.org/drive/#service-account-support).
+---------------------------------
+Let us create only the service accounts that we need.
+**Warning:** abuse of this feature is not the aim of AutoRclone and we do **NOT** recommend that you make a lot of projects; just one project and 100 service accounts allow you plenty of use. It's also possible that overuse might get your projects banned by Google.
+
+```
+Note: 1 service account can copy around 750 GB a day; 1 project makes 100 service accounts, so that's 75 TB a day. For most users this should easily suffice.
+```
+
+`python3 gen_sa_accounts.py --quick-setup 1 --new-only`
+
+A folder named accounts will be created which will contain keys for the service accounts created
+```
+We highly recommend zipping this folder and storing it somewhere safe, so that you do not have to create a new project every time you want to deploy the bot
+```
+### Adding service accounts to Google Groups:
+We use Google Groups to manage our service accounts, considering the
+[Official limits to the members of Team Drive](https://support.google.com/a/answer/7338880?hl=en) (Limit for individuals and groups directly added as members: 600).
+
+1. Turn on the Directory API following [official steps](https://developers.google.com/admin-sdk/directory/v1/quickstart/python) (save the generated json file to folder `credentials`).
+
+2. Create a group for your organization [in the Admin console](https://support.google.com/a/answer/33343?hl=en). After creating a group, you will have an address, for example `sa@yourdomain.com`.
+
+3. Run `python3 add_to_google_group.py -g sa@yourdomain.com`
+
+4. Now, use the Google Group (**Step 2**) to manage your service accounts: add the group address `sa@yourdomain.com` or `sa@googlegroups.com` to the Team Drive or folder
diff --git a/add_to_google_group.py b/add_to_google_group.py
new file mode 100644
index 0000000..aaed28b
--- /dev/null
+++ b/add_to_google_group.py
@@ -0,0 +1,84 @@
+# auto rclone
+# Add service accounts to groups for your organization
+#
+# Author Telegram https://t.me/CodyDoby
+# Inbox codyd@qq.com
+
+from __future__ import print_function
+
+import os
+import pickle
+
+import argparse
+import glob
+import googleapiclient.discovery
+import json
+import progress.bar
+import time
+from google.auth.transport.requests import Request
+from google_auth_oauthlib.flow import InstalledAppFlow
+
+stt = time.time()
+
+parse = argparse.ArgumentParser(
+ description='A tool to add service accounts to groups for your organization from a folder containing credential '
+ 'files.')
+parse.add_argument('--path', '-p', default='accounts',
+ help='Specify an alternative path to the service accounts folder.')
+parse.add_argument('--credentials', '-c', default='credentials/credentials.json',
+ help='Specify the relative path for the controller file.')
+parsereq = parse.add_argument_group('required arguments')
+# service-account@googlegroups.com
+parsereq.add_argument('--groupaddr', '-g', help='The address of groups for your organization.', required=True)
+
+args = parse.parse_args()
+acc_dir = args.path
+gaddr = args.groupaddr
+credentials = glob.glob(args.credentials)
+
+creds = None
+if os.path.exists('credentials/token.pickle'):
+ with open('credentials/token.pickle', 'rb') as token:
+ creds = pickle.load(token)
+# If there are no (valid) credentials available, let the user log in.
+if not creds or not creds.valid:
+ if creds and creds.expired and creds.refresh_token:
+ creds.refresh(Request())
+ else:
+ flow = InstalledAppFlow.from_client_secrets_file(credentials[0], scopes=[
+ 'https://www.googleapis.com/auth/admin.directory.group',
+ 'https://www.googleapis.com/auth/admin.directory.group.member'
+ ])
+ # creds = flow.run_local_server(port=0)
+ creds = flow.run_console()
+ # Save the credentials for the next run
+ with open('credentials/token.pickle', 'wb') as token:
+ pickle.dump(creds, token)
+
+group = googleapiclient.discovery.build("admin", "directory_v1", credentials=creds)
+
+print(group.members())
+
+batch = group.new_batch_http_request()
+
+sa = glob.glob('%s/*.json' % acc_dir)
+
+# sa = sa[0:5]
+
+pbar = progress.bar.Bar("Readying accounts", max=len(sa))
+for i in sa:
+ ce = json.loads(open(i, 'r').read())['client_email']
+
+ body = {"email": ce, "role": "MEMBER"}
+ batch.add(group.members().insert(groupKey=gaddr, body=body))
+ # group.members().insert(groupKey=gaddr, body=body).execute()
+
+ pbar.next()
+pbar.finish()
+print('Adding...')
+batch.execute()
+
+print('Complete.')
+hours, rem = divmod((time.time() - stt), 3600)
+minutes, sec = divmod(rem, 60)
+print("Elapsed Time:\n{:0>2}:{:0>2}:{:05.2f}".format(int(hours), int(minutes), sec))
diff --git a/bot/__init__.py b/bot/__init__.py
index 9034147..e2ca270 100644
--- a/bot/__init__.py
+++ b/bot/__init__.py
@@ -1,11 +1,15 @@
import logging
-import aria2p
-import threading
import os
-from dotenv import load_dotenv
-import telegram.ext as tg
+import threading
import time
+import aria2p
+import telegram.ext as tg
+from dotenv import load_dotenv
+import socket
+
+socket.setdefaulttimeout(600)
+
botStartTime = time.time()
if os.path.exists('log.txt'):
with open('log.txt', 'r+') as f:
@@ -28,7 +32,7 @@ def getConfig(name: str):
try:
if bool(getConfig('_____REMOVE_THIS_LINE_____')):
- logging.ERROR('The README.md file there to be read! Exiting now!')
+ logging.error('The README.md file there to be read! Exiting now!')
exit()
except KeyError:
pass
@@ -80,13 +84,22 @@ def getConfig(name: str):
INDEX_URL = None
try:
IS_TEAM_DRIVE = getConfig('IS_TEAM_DRIVE')
- if IS_TEAM_DRIVE == 'True' or IS_TEAM_DRIVE == 'true':
+ if IS_TEAM_DRIVE.lower() == 'true':
IS_TEAM_DRIVE = True
else:
IS_TEAM_DRIVE = False
-
except KeyError:
IS_TEAM_DRIVE = False
-updater = tg.Updater(token=BOT_TOKEN)
+
+try:
+ USE_SERVICE_ACCOUNTS = getConfig('USE_SERVICE_ACCOUNTS')
+ if USE_SERVICE_ACCOUNTS.lower() == 'true':
+ USE_SERVICE_ACCOUNTS = True
+ else:
+ USE_SERVICE_ACCOUNTS = False
+except KeyError:
+ USE_SERVICE_ACCOUNTS = False
+
+updater = tg.Updater(token=BOT_TOKEN,use_context=True)
bot = updater.bot
dispatcher = updater.dispatcher
diff --git a/bot/__main__.py b/bot/__main__.py
index 0c0d2d6..91db40c 100644
--- a/bot/__main__.py
+++ b/bot/__main__.py
@@ -8,10 +8,11 @@
import shutil
from .helper.telegram_helper.filters import CustomFilters
from bot.helper.telegram_helper.bot_commands import BotCommands
-from .modules import authorize, list, cancel_mirror, mirror_status, mirror
+from .modules import authorize, list, cancel_mirror, mirror_status, mirror, clone, watch
+
@run_async
-def stats(bot,update):
+def stats(update,context):
currentTime = get_readable_time((time.time() - botStartTime))
total, used, free = shutil.disk_usage('.')
total = get_readable_file_size(total)
@@ -19,33 +20,37 @@ def stats(bot,update):
free = get_readable_file_size(free)
stats = f'Bot Uptime: {currentTime}\n' \
f'Total disk space: {total}\n' \
- f'Used: {used}\n' \
- f'Free: {free}'
+ f'Used: {used}\n' \
+ f'Free: {free}'
sendMessage(stats, bot, update)
-
@run_async
-def start(bot,update):
+def start(update,context):
sendMessage("This is a bot which can mirror all your links to Google drive!\n"
- "Type /help to get a list of available commands", bot, update)
+ "Type /help to get a list of available commands", context.bot, update)
@run_async
-def ping(bot,update):
+def ping(update,context):
start_time = int(round(time.time() * 1000))
- reply = sendMessage("Starting Ping", bot, update)
+ reply = sendMessage("Starting Ping", context.bot, update)
end_time = int(round(time.time()*1000))
editMessage(f'{end_time - start_time} ms',reply)
@run_async
-def log(bot,update):
- sendLogFile(bot, update)
+def log(update,context):
+ sendLogFile(context.bot, update)
@run_async
-def bot_help(bot,update):
+def log(update,context):
+ sendLogFile(context.bot, update)
+
+
+@run_async
+def bot_help(update, context):
help_string = f'''
/{BotCommands.HelpCommand}: To get this message
@@ -53,6 +58,10 @@ def bot_help(bot,update):
/{BotCommands.TarMirrorCommand} [download_url][magnet_link]: start mirroring and upload the archived (.tar) version of the download
+/{BotCommands.WatchCommand} [youtube-dl supported link]: Mirror through youtube-dl
+
+/{BotCommands.TarWatchCommand} [youtube-dl supported link]: Mirror through youtube-dl and tar before uploading
+
/{BotCommands.CancelMirror} : Reply to the message by which the download was initiated and that download will be cancelled
/{BotCommands.StatusCommand}: Shows a status of all the downloads
@@ -66,7 +75,7 @@ def bot_help(bot,update):
/{BotCommands.LogCommand}: Get a log file of the bot. Handy for getting crash reports
'''
- sendMessage(help_string, bot, update)
+ sendMessage(help_string, context.bot, update)
def main():
@@ -78,7 +87,7 @@ def main():
help_handler = CommandHandler(BotCommands.HelpCommand,
bot_help, filters=CustomFilters.authorized_chat | CustomFilters.authorized_user)
stats_handler = CommandHandler(BotCommands.StatsCommand,
- stats, filters=CustomFilters.authorized_chat | CustomFilters.authorized_user)
+ stats, filters=CustomFilters.authorized_chat | CustomFilters.authorized_user)
log_handler = CommandHandler(BotCommands.LogCommand, log, filters=CustomFilters.owner_filter)
dispatcher.add_handler(start_handler)
dispatcher.add_handler(ping_handler)
diff --git a/bot/helper/ext_utils/bot_utils.py b/bot/helper/ext_utils/bot_utils.py
index 9f699c7..2ef75eb 100644
--- a/bot/helper/ext_utils/bot_utils.py
+++ b/bot/helper/ext_utils/bot_utils.py
@@ -54,6 +54,14 @@ def get_readable_file_size(size_in_bytes) -> str:
except IndexError:
return 'File too large'
+def getDownloadByGid(gid):
+ with download_dict_lock:
+ for dl in download_dict.values():
+ if dl.status() == MirrorStatus.STATUS_DOWNLOADING:
+ if dl.download().gid == gid:
+ return dl
+ return None
+
def get_progress_bar_string(status):
completed = status.processed_bytes() / 8
@@ -90,7 +98,13 @@ def get_readable_message():
if download.status() != MirrorStatus.STATUS_ARCHIVING:
msg += f"\n{get_progress_bar_string(download)} {download.progress()} of " \
f"{download.size()}" \
- f" at {download.speed()}, ETA: {download.eta()}\n\n"
+ f" at {download.speed()}, ETA: {download.eta()} "
+ if download.status() == MirrorStatus.STATUS_DOWNLOADING:
+ if hasattr(download,'is_torrent'):
+ msg += f"| P: {download.download().connections} " \
+ f"| S: {download.download().num_seeders}"
+ msg += f"\nGID: {download.download().gid}"
+ msg += "\n\n"
return msg
diff --git a/bot/helper/ext_utils/fs_utils.py b/bot/helper/ext_utils/fs_utils.py
index ad3b40f..dd534b7 100644
--- a/bot/helper/ext_utils/fs_utils.py
+++ b/bot/helper/ext_utils/fs_utils.py
@@ -4,6 +4,7 @@
import os
import pathlib
import mimetypes
+import tarfile
def clean_download(path: str):
@@ -29,13 +30,25 @@ def exit_clean_up(signal, frame):
LOGGER.warning("Force Exiting before the cleanup finishes!")
sys.exit(1)
-
-def tar(orig_path: str):
- path = pathlib.PurePath(orig_path)
- base = path.name
- root = pathlib.Path(path.parent.as_posix()).absolute().as_posix()
- LOGGER.info(f'Tar: orig_path: {orig_path}, base: {base}, root: {root}')
- return shutil.make_archive(orig_path, 'tar', root, base)
+def get_path_size(path):
+ if os.path.isfile(path):
+ return os.path.getsize(path)
+ total_size = 0
+ for root, dirs, files in os.walk(path):
+ for f in files:
+ abs_path = os.path.join(root, f)
+ total_size += os.path.getsize(abs_path)
+ return total_size
+
+
+def tar(org_path):
+ tar_path = org_path + ".tar"
+ path = pathlib.PurePath(org_path)
+ LOGGER.info(f'Tar: orig_path: {org_path}, tar_path: {tar_path}')
+ tar = tarfile.open(tar_path, "w")
+ tar.add(org_path,arcname=path.name)
+ tar.close()
+ return tar_path
def get_mime_type(file_path):
diff --git a/bot/helper/mirror_utils/download_utils/aria2_download.py b/bot/helper/mirror_utils/download_utils/aria2_download.py
index 974218d..5063fd7 100644
--- a/bot/helper/mirror_utils/download_utils/aria2_download.py
+++ b/bot/helper/mirror_utils/download_utils/aria2_download.py
@@ -1,10 +1,12 @@
+from aria2p import API
+from aria2p.client import ClientException
+
from bot import aria2
from bot.helper.ext_utils.bot_utils import *
-from .download_helper import DownloadHelper
from bot.helper.mirror_utils.status_utils.aria_download_status import AriaDownloadStatus
from bot.helper.telegram_helper.message_utils import *
-import threading
-from aria2p import API
+from .download_helper import DownloadHelper
+
class AriaDownloadHelper(DownloadHelper):
@@ -26,7 +28,9 @@ def __onDownloadComplete(self, api: API, gid):
if self.gid == gid:
if api.get_download(gid).followed_by_ids:
self.gid = api.get_download(gid).followed_by_ids[0]
- download_dict[self._listener.uid] = AriaDownloadStatus(self.gid, self._listener)
+ with download_dict_lock:
+ download_dict[self._listener.uid] = AriaDownloadStatus(self.gid, self._listener)
+ download_dict[self._listener.uid].is_torrent =True
update_all_messages()
LOGGER.info(f'Changed gid from {gid} to {self.gid}')
else:
@@ -35,12 +39,16 @@ def __onDownloadComplete(self, api: API, gid):
def __onDownloadPause(self, api, gid):
if self.gid == gid:
LOGGER.info("Called onDownloadPause")
- self._listener.onDownloadError('Download stopped by user!')
+ download = api.get_download(gid)
+ error = download.error_message
+ self._listener.onDownloadError(error)
def __onDownloadStopped(self, api, gid):
if self.gid == gid:
LOGGER.info("Called on_download_stop")
- self._listener.onDownloadError('Download stopped by user!')
+ download = api.get_download(gid)
+ error = download.error_message
+ self._listener.onDownloadError(error)
def __onDownloadError(self, api, gid):
with self._resource_lock:
@@ -51,10 +59,14 @@ def __onDownloadError(self, api, gid):
self._listener.onDownloadError(error)
def add_download(self, link: str, path):
- if is_magnet(link):
- download = aria2.add_magnet(link, {'dir': path})
- else:
- download = aria2.add_uris([link], {'dir': path})
+ try:
+ if is_magnet(link):
+ download = aria2.add_magnet(link, {'dir': path})
+ else:
+ download = aria2.add_uris([link], {'dir': path})
+ except ClientException as err:
+ self._listener.onDownloadError(err.message)
+ return
self.gid = download.gid
with download_dict_lock:
download_dict[self._listener.uid] = AriaDownloadStatus(self.gid, self._listener)
diff --git a/bot/helper/mirror_utils/download_utils/youtube_dl_download_helper.py b/bot/helper/mirror_utils/download_utils/youtube_dl_download_helper.py
new file mode 100644
index 0000000..66e87c1
--- /dev/null
+++ b/bot/helper/mirror_utils/download_utils/youtube_dl_download_helper.py
@@ -0,0 +1,142 @@
+from .download_helper import DownloadHelper
+import time
+from youtube_dl import YoutubeDL, DownloadError
+import threading
+from bot import LOGGER, download_dict_lock, download_dict, DOWNLOAD_DIR
+from ..status_utils.youtube_dl_download_status import YoutubeDLDownloadStatus
+import logging
+import re
+
+LOGGER = logging.getLogger(__name__)
+
+
+class MyLogger:
+ def __init__(self, obj):
+ self.obj = obj
+
+ def debug(self, msg):
+ LOGGER.debug(msg)
+ # Hack to fix changing changing extension
+ match = re.search(r'.ffmpeg..Merging formats into..(.*?).$', msg)
+ if match and not self.obj.is_playlist:
+ self.obj.name = match.group(1)
+
+ def warning(self, msg):
+ LOGGER.warning(msg)
+
+ def error(self, msg):
+ LOGGER.error(msg)
+ #self.obj.onDownloadError(msg)
+
+
+class YoutubeDLHelper(DownloadHelper):
+ def __init__(self, listener):
+ super().__init__()
+ self.__name = ""
+ self.__start_time = time.time()
+ self.__listener = listener
+ self.__gid = ""
+ self.opts = {
+ 'format': 'bestaudio/best',
+ 'progress_hooks': [self.__onDownloadProgress],
+ 'usenetrc': True,
+ 'format': "best"
+ }
+ self.ydl = YoutubeDL(self.opts)
+ self.__download_speed = 0
+ self.download_speed_readable = ''
+ self.downloaded_bytes = 0
+ self.size = 0
+ self.is_playlist = False
+ self.last_downloaded = 0
+ self.is_cancelled = False
+ self.vid_id = ''
+ self.__resource_lock = threading.RLock()
+
+ @property
+ def download_speed(self):
+ with self.__resource_lock:
+ return self.__download_speed
+
+ @property
+ def gid(self):
+ with self.__resource_lock:
+ return self.__gid
+
+ def __onDownloadProgress(self, d):
+ if self.is_cancelled:
+ raise ValueError("Cancelling Download..")
+ if d['status'] == "finished":
+ if self.is_playlist:
+ self.last_downloaded = 0
+ elif d['status'] == "downloading":
+ with self.__resource_lock:
+ self.__download_speed = d['speed']
+ if self.is_playlist:
+ progress = d['downloaded_bytes'] / d['total_bytes']
+ chunk_size = d['downloaded_bytes'] - self.last_downloaded
+ self.last_downloaded = d['total_bytes'] * progress
+ self.downloaded_bytes += chunk_size
+ self.progress = (self.downloaded_bytes / self.size) * 100
+ else:
+ self.download_speed_readable = d['_speed_str']
+ self.downloaded_bytes = d['downloaded_bytes']
+
+ def __onDownloadStart(self):
+ with download_dict_lock:
+ download_dict[self.__listener.uid] = YoutubeDLDownloadStatus(self, self.__listener.uid)
+
+ def __onDownloadComplete(self):
+ self.__listener.onDownloadComplete()
+
+ def onDownloadError(self, error):
+ self.__listener.onDownloadError(error)
+
+ def extractMetaData(self, link):
+ result = self.ydl.extract_info(link, download=False)
+ if result.get('direct'):
+ return None
+ if 'entries' in result:
+ video = result['entries'][0]
+ for v in result['entries']:
+ if v.get('filesize'):
+ self.size += float(v['filesize'])
+ self.name = video.get('playlist_title')
+ self.vid_id = video.get('id')
+ self.is_playlist = True
+ else:
+ video = result
+ if video.get('filesize'):
+ self.size = int(video.get('filesize'))
+ self.name = f"{video.get('title')}.{video.get('ext')}"
+ self.vid_id = video.get('id')
+ return video
+
+ def __download(self, link):
+ try:
+ self.ydl.download([link], )
+ with YoutubeDL(self.opts) as ydl:
+ try:
+ ydl.download([link])
+ except DownloadError as e:
+ self.onDownloadError(str(e))
+ return
+ self.__onDownloadComplete()
+ except ValueError:
+ LOGGER.info("Download Cancelled by User!")
+ self.onDownloadError("Download Cancelled by User!")
+
+ def add_download(self, link, path):
+ LOGGER.info(f"Downloading with YT-DL: {link}")
+ self.__gid = f"{self.vid_id}{self.__listener.uid}"
+ self.opts['logger'] = MyLogger(self)
+ if not self.is_playlist:
+ self.opts['outtmpl'] = f"{path}/%(title)s.%(ext)s"
+ else:
+ self.opts['outtmpl'] = f"{path}/%(playlist_title)s/%(title)s.%(ext)s"
+ self.ydl = YoutubeDL(self.opts)
+ self.__onDownloadStart()
+ threading.Thread(target=self.__download, args=(link,)).start()
+
+ def cancel_download(self):
+ self.is_cancelled = True
diff --git a/bot/helper/mirror_utils/status_utils/upload_status.py b/bot/helper/mirror_utils/status_utils/upload_status.py
index a7e7c60..7fad343 100644
--- a/bot/helper/mirror_utils/status_utils/upload_status.py
+++ b/bot/helper/mirror_utils/status_utils/upload_status.py
@@ -28,8 +28,10 @@ def name(self):
return self.obj.name
def progress_raw(self):
- return self.obj.uploaded_bytes / self.__size * 100
-
+ try:
+ return self.obj.uploaded_bytes / self.__size * 100
+ except ZeroDivisionError:
+ return 0
def progress(self):
return f'{round(self.progress_raw(), 2)}%'
diff --git a/bot/helper/mirror_utils/status_utils/youtube_dl_download_status.py b/bot/helper/mirror_utils/status_utils/youtube_dl_download_status.py
new file mode 100644
index 0000000..51f0582
--- /dev/null
+++ b/bot/helper/mirror_utils/status_utils/youtube_dl_download_status.py
@@ -0,0 +1,55 @@
+from bot import DOWNLOAD_DIR
+from bot.helper.ext_utils.bot_utils import MirrorStatus, get_readable_file_size, get_readable_time
+from .status import Status
+
+
+class YoutubeDLDownloadStatus(Status):
+ def __init__(self, obj, uid):
+ self.obj = obj
+ self.uid = uid
+
+ def gid(self):
+ return self.obj.gid
+
+ def path(self):
+ return f"{DOWNLOAD_DIR}{self.uid}"
+
+ def processed_bytes(self):
+ return self.obj.downloaded_bytes
+
+ def size_raw(self):
+ return self.obj.size
+
+ def size(self):
+ return get_readable_file_size(self.size_raw())
+
+ def status(self):
+ return MirrorStatus.STATUS_DOWNLOADING
+
+ def name(self):
+ return self.obj.name
+
+ def progress_raw(self):
+ return self.obj.progress
+
+ def progress(self):
+ return f'{round(self.progress_raw(), 2)}%'
+
+ def speed_raw(self):
+ """
+ :return: Download speed in Bytes/Seconds
+ """
+ return self.obj.download_speed
+
+ def speed(self):
+ return f'{get_readable_file_size(self.speed_raw())}/s'
+
+ def eta(self):
+ try:
+ seconds = (self.size_raw() - self.processed_bytes()) / self.speed_raw()
+ return f'{get_readable_time(seconds)}'
+ except ZeroDivisionError:
+ return '-'
+
+ def download(self):
+ return self.obj
diff --git a/bot/helper/mirror_utils/upload_utils/gdriveTools.py b/bot/helper/mirror_utils/upload_utils/gdriveTools.py
index 059a4f7..bea9d4f 100644
--- a/bot/helper/mirror_utils/upload_utils/gdriveTools.py
+++ b/bot/helper/mirror_utils/upload_utils/gdriveTools.py
@@ -1,33 +1,38 @@
-from googleapiclient.discovery import build
-from google_auth_oauthlib.flow import InstalledAppFlow
+import json
+import os
+import pickle
+
from google.auth.transport.requests import Request
-from googleapiclient.http import MediaFileUpload
+from google.oauth2 import service_account
+from google_auth_oauthlib.flow import InstalledAppFlow
+from googleapiclient.discovery import build
from googleapiclient.errors import HttpError
-import pickle
-import os
+from googleapiclient.http import MediaFileUpload
from tenacity import *
-import threading
-from bot import LOGGER, parent_id, DOWNLOAD_DIR, IS_TEAM_DRIVE, INDEX_URL
-from bot.helper.ext_utils.fs_utils import get_mime_type
+
+from bot import LOGGER, parent_id, DOWNLOAD_DIR, IS_TEAM_DRIVE, INDEX_URL, USE_SERVICE_ACCOUNTS
from bot.helper.ext_utils.bot_utils import *
+from bot.helper.ext_utils.fs_utils import get_mime_type
logging.getLogger('googleapiclient.discovery').setLevel(logging.ERROR)
-
+SERVICE_ACCOUNT_INDEX = 0
class GoogleDriveHelper:
-
def __init__(self, name=None, listener=None):
self.__G_DRIVE_TOKEN_FILE = "token.pickle"
# Check https://developers.google.com/drive/scopes for all available scopes
- self.__OAUTH_SCOPE = ["https://www.googleapis.com/auth/drive"]
+ self.__OAUTH_SCOPE = ['https://www.googleapis.com/auth/drive']
# Redirect URI for installed apps, can be left as is
self.__REDIRECT_URI = "urn:ietf:wg:oauth:2.0:oob"
self.__G_DRIVE_DIR_MIME_TYPE = "application/vnd.google-apps.folder"
self.__G_DRIVE_BASE_DOWNLOAD_URL = "https://drive.google.com/uc?id={}&export=download"
+ self.__G_DRIVE_DIR_BASE_DOWNLOAD_URL = "https://drive.google.com/drive/folders/{}"
self.__listener = listener
self.__service = self.authorize()
+ self.__listener = listener
self._file_uploaded_bytes = 0
self.uploaded_bytes = 0
+ self.UPDATE_INTERVAL = 5
self.start_time = 0
self.total_time = 0
self._should_update = True
@@ -51,30 +56,33 @@ def speed(self):
except ZeroDivisionError:
return 0
- @retry(wait=wait_exponential(multiplier=2, min=3, max=6),stop=stop_after_attempt(5),retry=retry_if_exception_type(HttpError),before=before_log(LOGGER,logging.DEBUG))
+ def parseLink(self,link):
+ if "folders" in link or "file" in link:
+ node_id = link.split("?")[0].split("/")[-1].replace("/",'')
+ else:
+ node_id = link.split("=")[1].split("&")[0].replace("/",'')
+ return node_id
+
+ @retry(wait=wait_exponential(multiplier=2, min=3, max=6), stop=stop_after_attempt(5),
+ retry=retry_if_exception_type(HttpError), before=before_log(LOGGER, logging.DEBUG))
def _on_upload_progress(self):
if self.status is not None:
chunk_size = self.status.total_size * self.status.progress() - self._file_uploaded_bytes
self._file_uploaded_bytes = self.status.total_size * self.status.progress()
LOGGER.info(f'Chunk size: {get_readable_file_size(chunk_size)}')
self.uploaded_bytes += chunk_size
- self.total_time += DOWNLOAD_STATUS_UPDATE_INTERVAL
+ self.total_time += self.UPDATE_INTERVAL
- def __upload_empty_file(self, path, file_name, mime_type, parent_id=None):
- media_body = MediaFileUpload(path,
- mimetype=mime_type,
- resumable=False)
- file_metadata = {
- 'name': file_name,
- 'description': 'mirror',
- 'mimeType': mime_type,
- }
- if parent_id is not None:
- file_metadata['parents'] = [parent_id]
- return self.__service.files().create(supportsTeamDrives=True,
- body=file_metadata, media_body=media_body).execute()
+ def switchServiceAccount(self):
+ global SERVICE_ACCOUNT_INDEX
+ if SERVICE_ACCOUNT_INDEX == self.service_account_count - 1:
+ SERVICE_ACCOUNT_INDEX = 0
+ SERVICE_ACCOUNT_INDEX += 1
+ LOGGER.info(f"Switching to {SERVICE_ACCOUNT_INDEX}.json service account")
+ self.__service = self.authorize()
- @retry(wait=wait_exponential(multiplier=2, min=3, max=6),stop=stop_after_attempt(5),retry=retry_if_exception_type(HttpError),before=before_log(LOGGER,logging.DEBUG))
+ @retry(wait=wait_exponential(multiplier=2, min=3, max=6), stop=stop_after_attempt(5),
+ retry=retry_if_exception_type(HttpError), before=before_log(LOGGER, logging.DEBUG))
def __set_permission(self, drive_id):
permissions = {
'role': 'reader',
@@ -84,7 +92,8 @@ def __set_permission(self, drive_id):
}
return self.__service.permissions().create(supportsTeamDrives=True, fileId=drive_id, body=permissions).execute()
- @retry(wait=wait_exponential(multiplier=2, min=3, max=6),stop=stop_after_attempt(5),retry=retry_if_exception_type(HttpError),before=before_log(LOGGER,logging.DEBUG))
+ @retry(wait=wait_exponential(multiplier=2, min=3, max=6), stop=stop_after_attempt(5),
+ retry=retry_if_exception_type(HttpError), before=before_log(LOGGER, logging.DEBUG))
def upload_file(self, file_path, file_name, mime_type, parent_id):
# File body description
file_metadata = {
@@ -103,22 +112,34 @@ def upload_file(self, file_path, file_name, mime_type, parent_id):
body=file_metadata, media_body=media_body).execute()
if not IS_TEAM_DRIVE:
self.__set_permission(response['id'])
- drive_file = self.__service.files().get(fileId=response['id']).execute()
+ drive_file = self.__service.files().get(supportsTeamDrives=True,
+ fileId=response['id']).execute()
download_url = self.__G_DRIVE_BASE_DOWNLOAD_URL.format(drive_file.get('id'))
return download_url
media_body = MediaFileUpload(file_path,
mimetype=mime_type,
resumable=True,
- chunksize=50*1024*1024)
+ chunksize=50 * 1024 * 1024)
# Insert a file
drive_file = self.__service.files().create(supportsTeamDrives=True,
- body=file_metadata, media_body=media_body)
+ body=file_metadata, media_body=media_body)
response = None
while response is None:
if self.is_cancelled:
return None
- self.status, response = drive_file.next_chunk()
+ try:
+ self.status, response = drive_file.next_chunk()
+ except HttpError as err:
+ if err.resp.get('content-type', '').startswith('application/json'):
+ reason = json.loads(err.content).get('error').get('errors')[0].get('reason')
+ if reason == 'userRateLimitExceeded' or reason == 'dailyLimitExceeded':
+ if USE_SERVICE_ACCOUNTS:
+ self.switchServiceAccount()
+ LOGGER.info(f"Got: {reason}, Trying Again.")
+ self.upload_file(file_path, file_name, mime_type, parent_id)
+ else:
+ raise err
self._file_uploaded_bytes = 0
# Insert new permissions
if not IS_TEAM_DRIVE:
@@ -129,12 +150,14 @@ def upload_file(self, file_path, file_name, mime_type, parent_id):
return download_url
def upload(self, file_name: str):
+ if USE_SERVICE_ACCOUNTS:
+ self.service_account_count = len(os.listdir("accounts"))
self.__listener.onUploadStarted()
file_dir = f"{DOWNLOAD_DIR}{self.__listener.message.message_id}"
file_path = f"{file_dir}/{file_name}"
LOGGER.info("Uploading File: " + file_path)
self.start_time = time.time()
- self.updater = setInterval(5, self._on_upload_progress)
+ self.updater = setInterval(self.UPDATE_INTERVAL, self._on_upload_progress)
if os.path.isfile(file_path):
try:
mime_type = get_mime_type(file_path)
@@ -143,9 +166,13 @@ def upload(self, file_name: str):
raise Exception('Upload has been manually cancelled')
LOGGER.info("Uploaded To G-Drive: " + file_path)
except Exception as e:
- LOGGER.info(f"Total Attempts: {e.last_attempt.attempt_number}")
- LOGGER.error(e.last_attempt.exception())
- self.__listener.onUploadError(e)
+ if isinstance(e,RetryError):
+ LOGGER.info(f"Total Attempts: {e.last_attempt.attempt_number}")
+ err = e.last_attempt.exception()
+ else:
+ err = e
+ LOGGER.error(err)
+ self.__listener.onUploadError(str(err))
return
finally:
self.updater.cancel()
@@ -158,9 +185,13 @@ def upload(self, file_name: str):
LOGGER.info("Uploaded To G-Drive: " + file_name)
link = f"https://drive.google.com/folderview?id={dir_id}"
except Exception as e:
- LOGGER.info(f"Total Attempts: {e.last_attempt.attempt_number}")
- LOGGER.error(e.last_attempt.exception())
- self.__listener.onUploadError(e)
+ if isinstance(e,RetryError):
+ LOGGER.info(f"Total Attempts: {e.last_attempt.attempt_number}")
+ err = e.last_attempt.exception()
+ else:
+ err = e
+ LOGGER.error(err)
+ self.__listener.onUploadError(str(err))
return
finally:
self.updater.cancel()
@@ -170,6 +201,94 @@ def upload(self, file_name: str):
return link
@retry(wait=wait_exponential(multiplier=2, min=3, max=6),stop=stop_after_attempt(5),retry=retry_if_exception_type(HttpError),before=before_log(LOGGER,logging.DEBUG))
+ def copyFile(self,file_id,dest_id):
+ body = {
+ 'parents': [dest_id]
+ }
+ try:
+ res = self.__service.files().copy(supportsAllDrives=True,fileId=file_id,body=body).execute()
+ return res
+ except HttpError as err:
+ if err.resp.get('content-type', '').startswith('application/json'):
+ reason = json.loads(err.content).get('error').get('errors')[0].get('reason')
+ if reason == 'userRateLimitExceeded' or reason == 'dailyLimitExceeded':
+ if USE_SERVICE_ACCOUNTS:
+ self.switchServiceAccount()
+ LOGGER.info(f"Got: {reason}, Trying Again.")
+                    return self.copyFile(file_id,dest_id)
+ else:
+ raise err
+
+
+ def clone(self,link):
+ self.transferred_size = 0
+ file_id = self.parseLink(link)
+ msg = ""
+ LOGGER.info(f"File ID: {file_id}")
+ try:
+ meta = self.__service.files().get(supportsAllDrives=True,fileId=file_id,fields="name,id,mimeType,size").execute()
+ except Exception as e:
+ return f"{str(e).replace('>','').replace('<','')}"
+ if meta.get("mimeType") == self.__G_DRIVE_DIR_MIME_TYPE:
+ dir_id = self.create_directory(meta.get('name'),parent_id)
+ try:
+ result = self.cloneFolder(meta.get('name'),meta.get('name'),meta.get('id'),dir_id)
+ except Exception as e:
+ if isinstance(e,RetryError):
+ LOGGER.info(f"Total Attempts: {e.last_attempt.attempt_number}")
+ err = e.last_attempt.exception()
+ else:
+ err = str(e).replace('>','').replace('<','')
+ LOGGER.error(err)
+ return err
+ msg += f'{meta.get("name")} ({get_readable_file_size(self.transferred_size)})'
+ else:
+ file = self.copyFile(meta.get('id'),parent_id)
+            msg += f'{meta.get("name")} ({get_readable_file_size(int(meta.get("size", 0)))})'
+ return msg
+
+
+ def cloneFolder(self,name,local_path,folder_id,parent_id):
+ page_token = None
+ q =f"'{folder_id}' in parents"
+ files = []
+ LOGGER.info(f"Syncing: {local_path}")
+ new_id = None
+ while True:
+ response = self.__service.files().list(supportsTeamDrives=True,
+ includeTeamDriveItems=True,
+ q=q,
+ spaces='drive',
+ fields='nextPageToken, files(id, name, mimeType,size)',
+ pageToken=page_token).execute()
+ for file in response.get('files', []):
+ files.append(file)
+ page_token = response.get('nextPageToken', None)
+ if page_token is None:
+ break
+ if len(files) == 0:
+ return parent_id
+ for file in files:
+ if file.get('mimeType') == self.__G_DRIVE_DIR_MIME_TYPE:
+ file_path = os.path.join(local_path,file.get('name'))
+ current_dir_id = self.create_directory(file.get('name'),parent_id)
+ new_id = self.cloneFolder(file.get('name'),file_path,file.get('id'),current_dir_id)
+ else:
+                self.transferred_size += int(file.get('size', 0))
+ try:
+ self.copyFile(file.get('id'),parent_id)
+ new_id = parent_id
+ except Exception as e:
+ if isinstance(e,RetryError):
+ LOGGER.info(f"Total Attempts: {e.last_attempt.attempt_number}")
+ err = e.last_attempt.exception()
+ else:
+ err = e
+ LOGGER.error(err)
+ return new_id
+
+ @retry(wait=wait_exponential(multiplier=2, min=3, max=6), stop=stop_after_attempt(5),
+ retry=retry_if_exception_type(HttpError), before=before_log(LOGGER, logging.DEBUG))
def create_directory(self, directory_name, parent_id):
file_metadata = {
"name": directory_name,
@@ -207,57 +326,53 @@ def upload_dir(self, input_directory, parent_id):
def authorize(self):
# Get credentials
credentials = None
- if os.path.exists(self.__G_DRIVE_TOKEN_FILE):
- with open(self.__G_DRIVE_TOKEN_FILE, 'rb') as f:
- credentials = pickle.load(f)
- if credentials is None or not credentials.valid:
- if credentials and credentials.expired and credentials.refresh_token:
- credentials.refresh(Request())
- else:
- flow = InstalledAppFlow.from_client_secrets_file(
- 'credentials.json', self.__OAUTH_SCOPE)
- LOGGER.info(flow)
- credentials = flow.run_console(port=0)
+ if not USE_SERVICE_ACCOUNTS:
+ if os.path.exists(self.__G_DRIVE_TOKEN_FILE):
+ with open(self.__G_DRIVE_TOKEN_FILE, 'rb') as f:
+ credentials = pickle.load(f)
+ if credentials is None or not credentials.valid:
+ if credentials and credentials.expired and credentials.refresh_token:
+ credentials.refresh(Request())
+ else:
+ flow = InstalledAppFlow.from_client_secrets_file(
+ 'credentials.json', self.__OAUTH_SCOPE)
+ LOGGER.info(flow)
+ credentials = flow.run_console(port=0)
- # Save the credentials for the next run
- with open(self.__G_DRIVE_TOKEN_FILE, 'wb') as token:
- pickle.dump(credentials, token)
- return build('drive', 'v3', credentials=credentials, cache_discovery=False)
+ # Save the credentials for the next run
+ with open(self.__G_DRIVE_TOKEN_FILE, 'wb') as token:
+ pickle.dump(credentials, token)
+ else:
+ LOGGER.info(f"Authorizing with {SERVICE_ACCOUNT_INDEX}.json service account")
+ credentials = service_account.Credentials.from_service_account_file(f'accounts/{SERVICE_ACCOUNT_INDEX}.json',
+ scopes=self.__OAUTH_SCOPE)
+ return build('drive','v3', credentials=credentials, cache_discovery=False)
def drive_list(self, fileName):
msg = ""
# Create Search Query for API request.
query = f"'{parent_id}' in parents and (name contains '{fileName}')"
- page_token = None
- results = []
- while True:
- response = self.__service.files().list(supportsTeamDrives=True,
- includeTeamDriveItems=True,
- q=query,
- spaces='drive',
- fields='nextPageToken, files(id, name, mimeType, size)',
- pageToken=page_token,
- orderBy='modifiedTime desc').execute()
- for file in response.get('files', []):
- if len(results) >= 20:
- break
- if file.get(
- 'mimeType') == "application/vnd.google-apps.folder": # Detect Whether Current Entity is a Folder or File.
- msg += f"⁍ {file.get('name')}" \
- f" (folder)"
- if INDEX_URL is not None:
- url = f'{INDEX_URL}/{file.get("name")}/'
- msg += f' | Index URL'
- else:
- msg += f"⁍ {file.get('name')} ({get_readable_file_size(int(file.get('size')))})"
- if INDEX_URL is not None:
- url = f'{INDEX_URL}/{file.get("name")}'
- msg += f' | Index URL'
- msg += '\n'
- results.append(file)
- page_token = response.get('nextPageToken', None)
- if page_token is None:
- break
- del results
+ response = self.__service.files().list(supportsTeamDrives=True,
+ includeTeamDriveItems=True,
+ q=query,
+ spaces='drive',
+ pageSize=20,
+ fields='files(id, name, mimeType, size)',
+ orderBy='modifiedTime desc').execute()
+ for file in response.get('files', []):
+ if file.get(
+ 'mimeType') == "application/vnd.google-apps.folder": # Detect Whether Current Entity is a Folder or File.
+ msg += f"⁍ {file.get('name')}" \
+ f" (folder)"
+ if INDEX_URL is not None:
+ url = f'{INDEX_URL}/{file.get("name")}/'
+ msg += f' | Index URL'
+ else:
+ msg += f"⁍ {file.get('name')} ({get_readable_file_size(int(file.get('size')))})"
+ if INDEX_URL is not None:
+ url = f'{INDEX_URL}/{file.get("name")}'
+ msg += f' | Index URL'
+ msg += '\n'
+
return msg
diff --git a/bot/helper/telegram_helper/bot_commands.py b/bot/helper/telegram_helper/bot_commands.py
index 918500d..d4e8b64 100644
--- a/bot/helper/telegram_helper/bot_commands.py
+++ b/bot/helper/telegram_helper/bot_commands.py
@@ -13,6 +13,8 @@ def __init__(self):
self.StatsCommand = 'stats'
self.HelpCommand = 'help'
self.LogCommand = 'log'
-
+ self.CloneCommand = "clone"
+ self.WatchCommand = 'watch'
+ self.TarWatchCommand = 'tarwatch'
BotCommands = _BotCommands()
diff --git a/bot/modules/authorize.py b/bot/modules/authorize.py
index de254f0..fedb4d1 100644
--- a/bot/modules/authorize.py
+++ b/bot/modules/authorize.py
@@ -9,7 +9,7 @@
@run_async
-def authorize(bot, update):
+def authorize(update,context):
reply_message = update.message.reply_to_message
msg = ''
with open('authorized_chats.txt', 'a') as file:
@@ -31,11 +31,11 @@ def authorize(bot, update):
msg = 'Person Authorized to use the bot!'
else:
msg = 'Person already authorized'
- sendMessage(msg, bot, update)
+ sendMessage(msg, context.bot, update)
@run_async
-def unauthorize(bot,update):
+def unauthorize(update,context):
reply_message = update.message.reply_to_message
if reply_message is None:
# Trying to unauthorize a chat
@@ -57,7 +57,7 @@ def unauthorize(bot,update):
file.truncate(0)
for i in AUTHORIZED_CHATS:
file.write(f'{i}\n')
- sendMessage(msg, bot, update)
+ sendMessage(msg, context.bot, update)
authorize_handler = CommandHandler(command=BotCommands.AuthorizeCommand, callback=authorize,
diff --git a/bot/modules/cancel_mirror.py b/bot/modules/cancel_mirror.py
index b69079d..6d7a968 100644
--- a/bot/modules/cancel_mirror.py
+++ b/bot/modules/cancel_mirror.py
@@ -5,44 +5,60 @@
from bot.helper.ext_utils.fs_utils import clean_download
from bot.helper.telegram_helper.bot_commands import BotCommands
from time import sleep
+from bot.helper.ext_utils.bot_utils import getDownloadByGid
+from ..helper.mirror_utils.download_utils.youtube_dl_download_helper import YoutubeDLHelper
@run_async
-def cancel_mirror(bot,update):
- mirror_message = update.message.reply_to_message
- with download_dict_lock:
- keys = download_dict.keys()
- dl = download_dict[mirror_message.message_id]
- if mirror_message is None or mirror_message.message_id not in keys:
- if '/mirror' in mirror_message.text or '/tarmirror' in mirror_message.text:
- msg = 'Message has already been cancelled'
- else:
- msg = 'Please reply to the /mirror message which was used to start the download to cancel it!'
- sendMessage(msg, bot, update)
- return
+def cancel_mirror(update,context):
+ args = update.message.text.split(" ",maxsplit=1)
+ mirror_message = None
+ if len(args) > 1:
+ gid = args[1]
+ dl = getDownloadByGid(gid)
+ if not dl:
+ sendMessage(f"GID: {gid} not found.",context.bot,update)
+ return
+ with download_dict_lock:
+ keys = list(download_dict.keys())
+ mirror_message = dl._listener.message
+ elif update.message.reply_to_message:
+ mirror_message = update.message.reply_to_message
+ with download_dict_lock:
+ keys = list(download_dict.keys())
+            dl = download_dict.get(mirror_message.message_id)
+ if len(args) == 1:
+ if mirror_message is None or mirror_message.message_id not in keys:
+            if mirror_message is not None and (BotCommands.MirrorCommand in mirror_message.text or BotCommands.TarMirrorCommand in mirror_message.text):
+ msg = "Mirror already have been cancelled"
+ sendMessage(msg,context.bot,update)
+ return
+ else:
+ msg = "Please reply to the /mirror message which was used to start the download or /cancel gid to cancel it!"
+ sendMessage(msg,context.bot,update)
+ return
if dl.status() == "Uploading":
- sendMessage("Upload in Progress, Don't Cancel it.", bot, update)
+ sendMessage("Upload in Progress, Don't Cancel it.", context.bot, update)
return
elif dl.status() == "Archiving":
- sendMessage("Archival in Progress, Don't Cancel it.", bot, update)
+ sendMessage("Archival in Progress, Don't Cancel it.", context.bot, update)
return
elif dl.status() != "Queued":
download = dl.download()
- if len(download.followed_by_ids) != 0:
- downloads = aria2.get_downloads(download.followed_by_ids)
- aria2.pause(downloads)
- aria2.pause([download])
-
- elif dl.status() == "Uploading":
- sendMessage("Upload in Progress, Dont Cancel it.",bot,update)
- return
+ if isinstance(download,YoutubeDLHelper):
+ download.cancel_download()
+ else:
+ if len(download.followed_by_ids) != 0:
+ downloads = aria2.get_downloads(download.followed_by_ids)
+ aria2.pause(downloads)
+ aria2.pause([download])
else:
dl._listener.onDownloadError("Download stopped by user!")
- sleep(1) #Wait a Second For Aria2 To free Resources.
+ sleep(1) # Wait a Second For Aria2 To free Resources.
clean_download(f'{DOWNLOAD_DIR}{mirror_message.message_id}/')
@run_async
-def cancel_all(update, bot):
+def cancel_all(update, context):
with download_dict_lock:
count = 0
for dlDetails in list(download_dict.values()):
@@ -54,7 +70,7 @@ def cancel_all(update, bot):
count += 1
dlDetails._listener.onDownloadError("Download Manually Cancelled By user.")
delete_all_messages()
- sendMessage(f'Cancelled {count} downloads!', update, bot)
+ sendMessage(f'Cancelled {count} downloads!', context.bot,update)
cancel_mirror_handler = CommandHandler(BotCommands.CancelMirror, cancel_mirror,
diff --git a/bot/modules/clone.py b/bot/modules/clone.py
new file mode 100644
index 0000000..407e353
--- /dev/null
+++ b/bot/modules/clone.py
@@ -0,0 +1,23 @@
+from telegram.ext import CommandHandler, run_async
+from bot.helper.mirror_utils.upload_utils.gdriveTools import GoogleDriveHelper
+from bot.helper.telegram_helper.message_utils import *
+from bot.helper.telegram_helper.filters import CustomFilters
+from bot.helper.telegram_helper.bot_commands import BotCommands
+from bot import dispatcher
+
+
+@run_async
+def cloneNode(update,context):
+ args = update.message.text.split(" ",maxsplit=1)
+ if len(args) > 1:
+ link = args[1]
+ msg = sendMessage(f"Cloning: {link}",context.bot,update)
+ gd = GoogleDriveHelper()
+ result = gd.clone(link)
+ deleteMessage(context.bot,msg)
+ sendMessage(result,context.bot,update)
+ else:
+        sendMessage("Provide G-Drive Shareable Link to Clone.",context.bot,update)
+
+clone_handler = CommandHandler(BotCommands.CloneCommand,cloneNode,filters=CustomFilters.authorized_chat | CustomFilters.authorized_user)
+dispatcher.add_handler(clone_handler)
\ No newline at end of file
diff --git a/bot/modules/list.py b/bot/modules/list.py
index bef678c..24a641c 100644
--- a/bot/modules/list.py
+++ b/bot/modules/list.py
@@ -7,18 +7,18 @@
from bot.helper.telegram_helper.bot_commands import BotCommands
@run_async
-def list_drive(bot,update):
+def list_drive(update,context):
message = update.message.text
search = message.split(' ',maxsplit=1)[1]
LOGGER.info(f"Searching: {search}")
gdrive = GoogleDriveHelper(None)
msg = gdrive.drive_list(search)
if msg:
- reply_message = sendMessage(msg, bot, update)
+ reply_message = sendMessage(msg, context.bot, update)
else:
- reply_message = sendMessage('No result found', bot, update)
+ reply_message = sendMessage('No result found', context.bot, update)
- threading.Thread(target=auto_delete_message, args=(bot, update.message, reply_message)).start()
+ threading.Thread(target=auto_delete_message, args=(context.bot, update.message, reply_message)).start()
list_handler = CommandHandler(BotCommands.ListCommand, list_drive,filters=CustomFilters.authorized_chat | CustomFilters.authorized_user)
diff --git a/bot/modules/mirror.py b/bot/modules/mirror.py
index ad474c9..0705677 100644
--- a/bot/modules/mirror.py
+++ b/bot/modules/mirror.py
@@ -11,14 +11,16 @@
from bot.helper.ext_utils.bot_utils import setInterval
from bot.helper.telegram_helper.filters import CustomFilters
from bot.helper.telegram_helper.bot_commands import BotCommands
+from bot.helper.mirror_utils.download_utils.youtube_dl_download_helper import YoutubeDLHelper
import pathlib
import os
class MirrorListener(listeners.MirrorListeners):
- def __init__(self, bot, update, isTar=False):
+ def __init__(self, bot, update, isTar=False, tag=None):
super().__init__(bot, update)
self.isTar = isTar
+ self.tag = tag
def onDownloadStarted(self):
pass
@@ -28,9 +30,12 @@ def onDownloadProgress(self):
pass
def clean(self):
- Interval[0].cancel()
- del Interval[0]
- delete_all_messages()
+ try:
+ Interval[0].cancel()
+ del Interval[0]
+ delete_all_messages()
+ except IndexError:
+ pass
def onDownloadComplete(self):
with download_dict_lock:
@@ -52,15 +57,19 @@ def onDownloadComplete(self):
else:
path = f'{DOWNLOAD_DIR}{self.uid}/{download_dict[self.uid].name()}'
up_name = pathlib.PurePath(path).name
+ LOGGER.info(f"Upload Name : {up_name}")
+ drive = gdriveTools.GoogleDriveHelper(up_name, self)
+ if size == 0:
+ size = fs_utils.get_path_size(m_path)
+ upload_status = UploadStatus(drive, size, self.uid)
with download_dict_lock:
- LOGGER.info(f"Upload Name : {up_name}")
- drive = gdriveTools.GoogleDriveHelper(up_name, self)
- upload_status = UploadStatus(drive, size, self.uid)
download_dict[self.uid] = upload_status
update_all_messages()
drive.upload(up_name)
def onDownloadError(self, error):
+ error = error.replace('<', ' ')
+ error = error.replace('>', ' ')
LOGGER.info(self.update.effective_chat.id)
with download_dict_lock:
try:
@@ -98,6 +107,8 @@ def onUploadComplete(self, link: str):
if os.path.isdir(f'{DOWNLOAD_DIR}/{self.uid}/{download_dict[self.uid].name()}'):
share_url += '/'
msg += f'\n\n Shareable link: here'
+ if self.tag is not None:
+ msg += f'\ncc: @{self.tag}'
try:
fs_utils.clean_download(download_dict[self.uid].path())
except FileNotFoundError:
@@ -133,19 +144,18 @@ def _mirror(bot, update, isTar=False):
link = ''
LOGGER.info(link)
link = link.strip()
-
- if len(link) == 0:
- if update.message.reply_to_message is not None:
- document = update.message.reply_to_message.document
- if document is not None and document.mime_type == "application/x-bittorrent":
- link = document.get_file().file_path
- else:
- sendMessage('Only torrent files can be mirrored from telegram', bot, update)
- return
+ reply_to = update.message.reply_to_message
+ if reply_to is not None:
+ tag = reply_to.from_user.username
+ if len(link) == 0:
+ if reply_to.document is not None and reply_to.document.mime_type == "application/x-bittorrent":
+ link = reply_to.document.get_file().file_path
+ else:
+ tag = None
if not bot_utils.is_url(link) and not bot_utils.is_magnet(link):
sendMessage('No download source provided', bot, update)
return
- listener = MirrorListener(bot, update, isTar)
+ listener = MirrorListener(bot, update, isTar, tag)
aria = aria2_download.AriaDownloadHelper(listener)
aria.add_download(link, f'{DOWNLOAD_DIR}/{listener.uid}/')
sendStatusMessage(update, bot)
@@ -154,13 +164,13 @@ def _mirror(bot, update, isTar=False):
@run_async
-def mirror(bot, update):
- _mirror(bot, update)
+def mirror(update, context):
+ _mirror(context.bot, update)
@run_async
-def tar_mirror(update, bot):
- _mirror(update, bot, True)
+def tar_mirror(update, context):
+ _mirror(context.bot, update, True)
mirror_handler = CommandHandler(BotCommands.MirrorCommand, mirror,
diff --git a/bot/modules/mirror_status.py b/bot/modules/mirror_status.py
index bc2a913..e4ae324 100644
--- a/bot/modules/mirror_status.py
+++ b/bot/modules/mirror_status.py
@@ -9,11 +9,11 @@
import threading
@run_async
-def mirror_status(bot,update):
+def mirror_status(update,context):
message = get_readable_message()
if len(message) == 0:
message = "No active downloads"
- reply_message = sendMessage(message, bot, update)
+ reply_message = sendMessage(message, context.bot, update)
-        threading.Thread(target=auto_delete_message, args=(bot, update.message, reply_message)).start()
+        threading.Thread(target=auto_delete_message, args=(context.bot, update.message, reply_message)).start()
return
index = update.effective_chat.id
@@ -21,8 +21,8 @@ def mirror_status(bot,update):
if index in status_reply_dict.keys():
-        deleteMessage(bot, status_reply_dict[index])
+        deleteMessage(context.bot, status_reply_dict[index])
del status_reply_dict[index]
- sendStatusMessage(update,bot)
- deleteMessage(bot,update.message)
+ sendStatusMessage(update,context.bot)
+ deleteMessage(context.bot,update.message)
mirror_status_handler = CommandHandler(BotCommands.StatusCommand, mirror_status,
diff --git a/bot/modules/watch.py b/bot/modules/watch.py
new file mode 100644
index 0000000..80148b8
--- /dev/null
+++ b/bot/modules/watch.py
@@ -0,0 +1,47 @@
+from telegram.ext import CommandHandler, run_async
+from telegram import Bot, Update
+from bot import Interval, INDEX_URL, DOWNLOAD_DIR, DOWNLOAD_STATUS_UPDATE_INTERVAL, dispatcher, LOGGER
+from bot.helper.ext_utils.bot_utils import setInterval
+from bot.helper.telegram_helper.message_utils import update_all_messages, sendMessage
+from .mirror import MirrorListener
+from bot.helper.mirror_utils.download_utils.youtube_dl_download_helper import YoutubeDLHelper
+from bot.helper.telegram_helper.bot_commands import BotCommands
+from bot.helper.telegram_helper.filters import CustomFilters
+
+
+def _watch(bot: Bot, update: Update, args: list, isTar=False):
+ try:
+ link = args[0]
+ except IndexError:
+ sendMessage('/watch [yt_dl supported link] to mirror with youtube_dl', bot, update)
+ return
+ reply_to = update.message.reply_to_message
+ if reply_to is not None:
+ tag = reply_to.from_user.username
+ else:
+ tag = None
+
+ listener = MirrorListener(bot, update, isTar, tag)
+ ydl = YoutubeDLHelper(listener)
+ ydl.add_download(link, f'{DOWNLOAD_DIR}{listener.uid}')
+ if len(Interval) == 0:
+ Interval.append(setInterval(DOWNLOAD_STATUS_UPDATE_INTERVAL, update_all_messages))
+
+
+@run_async
+def watchTar(update: Update, context):
+    _watch(context.bot, update, context.args, True)
+
+
+@run_async
+def watch(update: Update, context):
+    _watch(context.bot, update, context.args)
+
+
+mirror_handler = CommandHandler(BotCommands.WatchCommand, watch,
+                                filters=CustomFilters.authorized_chat | CustomFilters.authorized_user)
+tar_mirror_handler = CommandHandler(BotCommands.TarWatchCommand, watchTar,
+                                    filters=CustomFilters.authorized_chat | CustomFilters.authorized_user)
+
+dispatcher.add_handler(mirror_handler)
+dispatcher.add_handler(tar_mirror_handler)
diff --git a/config_sample.env b/config_sample.env
index 2e8252e..686d440 100644
--- a/config_sample.env
+++ b/config_sample.env
@@ -10,3 +10,4 @@ DOWNLOAD_STATUS_UPDATE_INTERVAL = 5
AUTO_DELETE_MESSAGE_DURATION = 20
IS_TEAM_DRIVE = ""
INDEX_URL = ""
+USE_SERVICE_ACCOUNTS = ""
\ No newline at end of file
diff --git a/gen_sa_accounts.py b/gen_sa_accounts.py
new file mode 100644
index 0000000..c77ef65
--- /dev/null
+++ b/gen_sa_accounts.py
@@ -0,0 +1,365 @@
+import errno
+import os
+import pickle
+import sys
+from argparse import ArgumentParser
+from base64 import b64decode
+from glob import glob
+from json import loads
+from random import choice
+from time import sleep
+
+from google.auth.transport.requests import Request
+from google_auth_oauthlib.flow import InstalledAppFlow
+from googleapiclient.discovery import build
+from googleapiclient.errors import HttpError
+
+SCOPES = ['https://www.googleapis.com/auth/drive', 'https://www.googleapis.com/auth/cloud-platform',
+ 'https://www.googleapis.com/auth/iam']
+project_create_ops = []
+current_key_dump = []
+sleep_time = 30
+
+
+# Create count SAs in project
+def _create_accounts(service, project, count):
+ batch = service.new_batch_http_request(callback=_def_batch_resp)
+ for i in range(count):
+ aid = _generate_id('mfc-')
+ batch.add(service.projects().serviceAccounts().create(name='projects/' + project, body={'accountId': aid,
+ 'serviceAccount': {
+ 'displayName': aid}}))
+ batch.execute()
+
+
+# Create accounts needed to fill project
+def _create_remaining_accounts(iam, project):
+ print('Creating accounts in %s' % project)
+ sa_count = len(_list_sas(iam, project))
+ while sa_count != 100:
+ _create_accounts(iam, project, 100 - sa_count)
+ sa_count = len(_list_sas(iam, project))
+
+
+# Generate a random id
+def _generate_id(prefix='saf-'):
+ chars = '-abcdefghijklmnopqrstuvwxyz1234567890'
+ return prefix + ''.join(choice(chars) for _ in range(25)) + choice(chars[1:])
+
+
+# List projects using service
+def _get_projects(service):
+ return [i['projectId'] for i in service.projects().list().execute()['projects']]
+
+
+# Default batch callback handler
+def _def_batch_resp(id, resp, exception):
+ if exception is not None:
+ if str(exception).startswith(' 0:
+ current_count = len(_get_projects(cloud))
+ if current_count + create_projects <= max_projects:
+ print('Creating %d projects' % (create_projects))
+ nprjs = _create_projects(cloud, create_projects)
+ selected_projects = nprjs
+ else:
+ sys.exit('No, you cannot create %d new project (s).\n'
+ 'Please reduce value of --quick-setup.\n'
+ 'Remember that you can totally create %d projects (%d already).\n'
+ 'Please do not delete existing projects unless you know what you are doing' % (
+ create_projects, max_projects, current_count))
+ else:
+ print('Will overwrite all service accounts in existing projects.\n'
+ 'So make sure you have some projects already.')
+ input("Press Enter to continue...")
+
+ if enable_services:
+ ste = []
+ ste.append(enable_services)
+ if enable_services == '~':
+ ste = selected_projects
+ elif enable_services == '*':
+ ste = _get_projects(cloud)
+ services = [i + '.googleapis.com' for i in services]
+ print('Enabling services')
+ _enable_services(serviceusage, ste, services)
+ if create_sas:
+ stc = []
+ stc.append(create_sas)
+ if create_sas == '~':
+ stc = selected_projects
+ elif create_sas == '*':
+ stc = _get_projects(cloud)
+ for i in stc:
+ _create_remaining_accounts(iam, i)
+ if download_keys:
+ try:
+ os.mkdir(path)
+ except OSError as e:
+ if e.errno == errno.EEXIST:
+ pass
+ else:
+ raise
+ std = []
+ std.append(download_keys)
+ if download_keys == '~':
+ std = selected_projects
+ elif download_keys == '*':
+ std = _get_projects(cloud)
+ _create_sa_keys(iam, std, path)
+ if delete_sas:
+ std = []
+ std.append(delete_sas)
+ if delete_sas == '~':
+ std = selected_projects
+ elif delete_sas == '*':
+ std = _get_projects(cloud)
+ for i in std:
+ print('Deleting service accounts in %s' % i)
+ _delete_sas(iam, i)
+
+
+if __name__ == '__main__':
+ parse = ArgumentParser(description='A tool to create Google service accounts.')
+ parse.add_argument('--path', '-p', default='accounts',
+ help='Specify an alternate directory to output the credential files.')
+ parse.add_argument('--token', default='token.pickle', help='Specify the pickle token file path.')
+ parse.add_argument('--credentials', default='credentials.json', help='Specify the credentials file path.')
+ parse.add_argument('--list-projects', default=False, action='store_true',
+ help='List projects viewable by the user.')
+ parse.add_argument('--list-sas', default=False, help='List service accounts in a project.')
+ parse.add_argument('--create-projects', type=int, default=None, help='Creates up to N projects.')
+ parse.add_argument('--max-projects', type=int, default=12, help='Max amount of project allowed. Default: 12')
+ parse.add_argument('--enable-services', default=None,
+ help='Enables services on the project. Default: IAM and Drive')
+ parse.add_argument('--services', nargs='+', default=['iam', 'drive'],
+ help='Specify a different set of services to enable. Overrides the default.')
+ parse.add_argument('--create-sas', default=None, help='Create service accounts in a project.')
+ parse.add_argument('--delete-sas', default=None, help='Delete service accounts in a project.')
+ parse.add_argument('--download-keys', default=None, help='Download keys for all the service accounts in a project.')
+ parse.add_argument('--quick-setup', default=None, type=int,
+ help='Create projects, enable services, create service accounts and download keys. ')
+ parse.add_argument('--new-only', default=False, action='store_true', help='Do not use exisiting projects.')
+ args = parse.parse_args()
+ # If credentials file is invalid, search for one.
+ if not os.path.exists(args.credentials):
+ options = glob('*.json')
+ print('No credentials found at %s. Please enable the Drive API in:\n'
+ 'https://developers.google.com/drive/api/v3/quickstart/python\n'
+ 'and save the json file as credentials.json' % args.credentials)
+ if len(options) < 1:
+ exit(-1)
+ else:
+ i = 0
+ print('Select a credentials file below.')
+ inp_options = [str(i) for i in list(range(1, len(options) + 1))] + options
+ while i < len(options):
+ print(' %d) %s' % (i + 1, options[i]))
+ i += 1
+ inp = None
+ while True:
+ inp = input('> ')
+ if inp in inp_options:
+ break
+ if inp in options:
+ args.credentials = inp
+ else:
+ args.credentials = options[int(inp) - 1]
+ print('Use --credentials %s next time to use this credentials file.' % args.credentials)
+ if args.quick_setup:
+ opt = '*'
+ if args.new_only:
+ opt = '~'
+ args.services = ['iam', 'drive']
+ args.create_projects = args.quick_setup
+ args.enable_services = opt
+ args.create_sas = opt
+ args.download_keys = opt
+ resp = serviceaccountfactory(
+ path=args.path,
+ token=args.token,
+ credentials=args.credentials,
+ list_projects=args.list_projects,
+ list_sas=args.list_sas,
+ create_projects=args.create_projects,
+ max_projects=args.max_projects,
+ create_sas=args.create_sas,
+ delete_sas=args.delete_sas,
+ enable_services=args.enable_services,
+ services=args.services,
+ download_keys=args.download_keys
+ )
+ if resp is not None:
+ if args.list_projects:
+ if resp:
+ print('Projects (%d):' % len(resp))
+ for i in resp:
+ print(' ' + i)
+ else:
+ print('No projects.')
+ elif args.list_sas:
+ if resp:
+ print('Service accounts in %s (%d):' % (args.list_sas, len(resp)))
+ for i in resp:
+ print(' %s (%s)' % (i['email'], i['uniqueId']))
+ else:
+ print('No service accounts.')
diff --git a/requirements.txt b/requirements.txt
index a917da1..c5ec760 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,7 +1,8 @@
-python-telegram-bot==12.2.0
+python-telegram-bot==12.6.1
google-api-python-client>=1.7.11,<1.7.20
google-auth-httplib2>=0.0.3,<0.1.0
google-auth-oauthlib>=0.4.1,<0.10.0
aria2p>=0.3.0,<0.10.0
python-dotenv>=0.10
-tenacity>=6.0.0
+tenacity>=6.0.0
+youtube-dl