#!/usr/bin/env python3
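"""Merge one magnetico database into another.

The merged (source) database is expected to be a magnetico SQLite file; the
target (main) database can be another SQLite file or a PostgreSQL DSN.
"""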

from __future__ import annotations

import sqlite3
import sys
import pathlib

from abc import ABC, abstractmethod
from functools import cached_property
from typing import Union, Optional, List, Dict, Tuple

import click

try:
    import psycopg2
    import psycopg2.extras
    import psycopg2.errorcodes
except ImportError:
    psycopg2 = None

try:
    import pgcopy
except ImportError:
    pgcopy = None

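# Leftover debug notes: raw byte values containing NUL/BOM bytes, and the
# progress-bar output observed while such values were being cleaned.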
# 4204942: b'\x00\x00\x02\x0100\x00'
# 10196786: b'\xff\xfe1\x00'

#  [#############################################################----------------]  4204000/14135209  05:42:40Removing null bytes from 1000 rows
#  [################################################################-------------]  4416000/14135209  05:43:0

"""
Schema:
CREATE TABLE torrents (
            id             INTEGER PRIMARY KEY,
            info_hash      BLOB NOT NULL UNIQUE,
            name           TEXT NOT NULL,
            total_size     INTEGER NOT NULL CHECK(total_size > 0),
            discovered_on  INTEGER NOT NULL CHECK(discovered_on > 0)
        ,
         updated_on INTEGER CHECK (updated_on > 0) DEFAULT NULL,
          n_seeders  INTEGER CHECK ((updated_on IS NOT NULL AND n_seeders >= 0) OR (updated_on IS NULL AND n_seeders IS NULL)) DEFAULT NULL,
           n_leechers INTEGER CHECK ((updated_on IS NOT NULL AND n_leechers >= 0) OR (updated_on IS NULL AND n_leechers IS NULL)) DEFAULT NULL,
            modified_on INTEGER NOT NULL
                CHECK (modified_on >= discovered_on AND (updated_on IS NOT NULL OR modified_on >= updated_on))
                DEFAULT 32503680000);
CREATE TABLE files (
            id          INTEGER PRIMARY KEY,
            torrent_id  INTEGER REFERENCES torrents ON DELETE CASCADE ON UPDATE RESTRICT,
            size        INTEGER NOT NULL,
            path        TEXT NOT NULL
        , is_readme INTEGER CHECK (is_readme IS NULL OR is_readme=1) DEFAULT NULL, content   TEXT    CHECK ((content IS NULL AND is_readme IS NULL) OR (content IS NOT NULL AND is_readme=1)) DEFAULT NULL);
CREATE UNIQUE INDEX info_hash_index ON torrents	(info_hash);
CREATE UNIQUE INDEX readme_index ON files (torrent_id, is_readme);
CREATE VIRTUAL TABLE torrents_idx USING fts5(name, content='torrents', content_rowid='id', tokenize="porter unicode61 separators ' !""#$%&''()*+,-./:;<=>?@[\]^_`{|}~'")
/* torrents_idx(name) */;
CREATE TABLE IF NOT EXISTS 'torrents_idx_data'(id INTEGER PRIMARY KEY, block BLOB);
CREATE TABLE IF NOT EXISTS 'torrents_idx_idx'(segid, term, pgno, PRIMARY KEY(segid, term)) WITHOUT ROWID;
CREATE TABLE IF NOT EXISTS 'torrents_idx_docsize'(id INTEGER PRIMARY KEY, sz BLOB);
CREATE TABLE IF NOT EXISTS 'torrents_idx_config'(k PRIMARY KEY, v) WITHOUT ROWID;
CREATE TRIGGER torrents_idx_ai_t AFTER INSERT ON torrents BEGIN
              INSERT INTO torrents_idx(rowid, name) VALUES (new.id, new.name);
            END;
CREATE TRIGGER torrents_idx_ad_t AFTER DELETE ON torrents BEGIN
              INSERT INTO torrents_idx(torrents_idx, rowid, name) VALUES('delete', old.id, old.name);
            END;
CREATE TRIGGER torrents_idx_au_t AFTER UPDATE ON torrents BEGIN
              INSERT INTO torrents_idx(torrents_idx, rowid, name) VALUES('delete', old.id, old.name);
              INSERT INTO torrents_idx(rowid, name) VALUES (new.id, new.name);
            END;
CREATE TRIGGER "torrents_modified_on_default_t" AFTER INSERT ON "torrents" BEGIN
              UPDATE "torrents" SET "modified_on" = NEW."discovered_on" WHERE "id" = NEW."id" AND NEW."modified_on" = 32503680000;
            END;
CREATE INDEX modified_on_index ON torrents (modified_on);
"""

# psycopg2 may be absent (see the import guard above); only reference its types when present
if psycopg2 is not None:
    Connection = Union[sqlite3.Connection, psycopg2._psycopg.connection]
    Cursor = Union[sqlite3.Cursor, psycopg2._psycopg.cursor]
else:
    Connection = sqlite3.Connection
    Cursor = sqlite3.Cursor


class Database(ABC):
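    """Base wrapper around a database acting as merge source or target."""
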
    def __init__(self, dsn: str, source: Database = None):
        self.cursor: Optional[Cursor] = None
        self.connection: Optional[Connection] = None
        self.source = source

        self.connection = self.connect(dsn)
        self.cursor = self.connection.cursor()

        self.torrents_table = "torrents"
        self.options = {}

    @classmethod
    def from_dsn(cls, dsn: str, source: Database = None) -> Database:
        if dsn.startswith("postgresql://"):
            return PostgreSQL(dsn, source)
        elif pathlib.Path(dsn).exists():
            return SQLite(dsn, source)
        # Neither a PostgreSQL DSN nor an existing SQLite file
        raise click.ClickException(f"Unrecognized database: {dsn}")

    def __del__(self):
        self.close()

    def close(self):
        if self.cursor is not None:
            self.cursor.close()
            self.cursor = None
        if self.connection is not None:
            self.connection.close()
            self.connection = None

    def set_options(self, options: Optional[dict] = None):
        # Avoid a shared mutable default argument
        self.options = options if options is not None else {}

    @abstractmethod
    def connect(self, dsn: str) -> Connection:
        pass

    def before_import(self):
        pass

    def after_import(self):
        pass

    @property
    def torrents_count(self) -> int:
        self.cursor.execute("SELECT count(*) from torrents")
        return self.cursor.fetchone()[0]

    @property
    @abstractmethod
    def file_columns(self) -> list[str]:
        pass

    @property
    @abstractmethod
    def torrent_columns(self) -> list[str]:
        pass

    def get_torrents_cursor(self, arraysize=1000) -> Cursor:
        select_cursor = self.connection.cursor()
        select_cursor.arraysize = arraysize
        select_cursor.execute(f"SELECT * FROM {self.torrents_table}")
        return select_cursor

    @abstractmethod
    def merge_torrents(self, torrents: list[dict]) -> Dict[str, int]:
        """Insert a batch of torrents and return counters ("inserted", "failed", "processed")."""
        pass

    @property
    @abstractmethod
    def placeholder(self):
        pass


class SQLite(Database):
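    """SQLite source/target.

    When used as a target with an SQLite source, the target file is ATTACHed to
    the source connection and rows are copied with plain SQL statements.
    """
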
    def __init__(self, filename: str, source: SQLite = None):
        if source is not None and not isinstance(source, SQLite):
            raise NotImplementedError("SQLite target can only use SQLite source")
        super().__init__(filename, source)
        # For type hints
        self.source = source

    def close(self):
        if not self.merged_source:
            super().close()

    def connect(self, dsn: str) -> sqlite3.Connection:
        if self.merged_source:
            with self.source.connection:  # Shortcut available only in sqlite
                self.source.connection.execute("ATTACH ? AS target_db", (dsn,))
            return self.source.connection
        else:
            connection = sqlite3.connect(dsn)
            connection.row_factory = sqlite3.Row
            # Some names were stored with invalid encoding; replace undecodable bytes.
            connection.text_factory = lambda x: x.decode("utf8", errors="replace")
            return connection

    @cached_property
    def file_columns(self):
        self.cursor.execute(
            "SELECT name FROM pragma_table_info('files') WHERE name not in ('id', 'torrent_id')"
            "SELECT name FROM pragma_table_info('files') WHERE name not in ('id', 'torrent_id')"
        )
        return [row[0] for row in self.cursor]

    @cached_property
    def torrent_columns(self):
        self.cursor.execute(
            "SELECT name FROM pragma_table_info('torrents') WHERE name not in ('id')"
        )
        return [row[0] for row in self.cursor]

    def merge_torrents(self, torrents: list[dict]) -> Dict[str, int]:
        torrents_statement = f"""INSERT INTO target_db.torrents ({','.join(self.torrent_columns)})
            VALUES ({','.join('?' * len(self.torrent_columns))}) ON CONFLICT DO NOTHING"""
        files_statement = f"""INSERT INTO target_db.files (torrent_id, {','.join(self.file_columns)})
        SELECT ?, {','.join(self.file_columns)} FROM files WHERE torrent_id = ?"""

        failed = 0
        inserted = 0
        processed = 0
        for torrent in torrents:
            self.cursor.execute(
                torrents_statement,
                (*[torrent[column] for column in self.torrent_columns],),
            )
            processed += 1
            # rowcount is 0 when ON CONFLICT DO NOTHING skipped an existing torrent
            if self.cursor.rowcount == 0:
                failed += 1
            else:
                inserted += 1
                self.merge_files(files_statement, self.cursor.lastrowid, torrent["id"])
        return {"failed": failed, "inserted": inserted, "processed": processed}

    def merge_files(self, statement: str, torrent_id: int, previous_torrent_id: int):
        if self.merged_source:
            self.cursor.execute(statement, (torrent_id, previous_torrent_id))

    @property
    def merged_source(self):
        # Class is tested in __init__
        return self.source is not None

    @property
    def placeholder(self):
        return "?"


class PostgreSQL(Database):
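    """PostgreSQL target fed from an SQLite source.

    Uses psycopg2, and pgcopy (when available) for bulk COPY of the files table.
    """
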
    def __init__(self, dsn: str, source: SQLite = None):
        if source is not None and not isinstance(source, SQLite):
            raise NotImplementedError("PostgreSQL target can only use SQLite source")
        super().__init__(dsn, source)
        # For type hints
        self.source = source
        self.indices = {}
        self.create_contraint_statements = []
        self.drop_contraint_statements = []
        self.copy_manager = None

    def connect(self, dsn: str) -> psycopg2._psycopg.connection:
        if psycopg2 is None:
            raise click.ClickException("psycopg2 driver is missing")
        return psycopg2.connect(dsn)

    def generate_constraint_statements(self):
        # See https://blog.hagander.net/automatically-dropping-and-creating-constraints-131/
        self.cursor.execute(
            """
            SELECT 'ALTER TABLE '||nspname||'.\"'||relname||'\" ADD CONSTRAINT \"'
                    ||conname||'\" '|| pg_get_constraintdef(pg_constraint.oid)||';'
            FROM pg_constraint
            INNER JOIN pg_class ON conrelid=pg_class.oid
            INNER JOIN pg_namespace ON pg_namespace.oid=pg_class.relnamespace
            WHERE relname IN ('torrents', 'files') AND conname != 'torrents_info_hash_key'
            ORDER BY CASE WHEN contype='f' THEN 0 ELSE 1 END DESC,
                            contype DESC, nspname DESC, relname DESC, conname DESC"""
        )
        self.create_contraint_statements = [
            result[0] for result in self.cursor.fetchall()
        ]

        self.cursor.execute(
            """
            SELECT 'ALTER TABLE "'||nspname||'"."'||relname||'" DROP CONSTRAINT "'||conname||'";'
            FROM pg_constraint
            INNER JOIN pg_class ON conrelid=pg_class.oid
            INNER JOIN pg_namespace ON pg_namespace.oid=pg_class.relnamespace
            WHERE relname IN ('torrents', 'files') AND conname != 'torrents_info_hash_key'
            ORDER BY CASE WHEN contype='f' THEN 0 ELSE 1 END, contype, nspname, relname, conname"""
        )
        self.drop_contraint_statements = [
            result[0] for result in self.cursor.fetchall()
        ]

    def get_indices(self):
        self.cursor.execute(
            """SELECT indexname, indexdef FROM pg_indexes
                        WHERE schemaname = 'public'
                            AND tablename IN ('torrents', 'files')
                            AND indexname != 'torrents_info_hash_key'"""
        )
        return {result[0]: result[1] for result in self.cursor.fetchall()}

    def before_import(self):
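        """In --fast mode, drop constraints and indices so the bulk insert is
        cheaper; after_import() recreates them once the merge is done."""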
        with self.connection:
            self.generate_constraint_statements()

            if self.options.get("fast", False):
                click.echo(
                    f"-> Postgresql target, dropping constraints…",
                    nl=False,
                )
                for statement in self.drop_contraint_statements:
                    self.cursor.execute(statement)
                click.echo(" Done.")

                # Collect indices after dropping constraints
                self.indices = self.get_indices()
                index_names = self.indices.keys()
                click.echo(
                    f"-> Postgresql target, dropping indices: {', '.join(index_names)}…",
                    nl=False,
                )
                self.cursor.execute(f"DROP INDEX {','.join(index_names)}")
                click.echo(" Done.")

                if pgcopy is not None:
                    self.copy_manager = pgcopy.CopyManager(
                        self.connection, "files", ["torrent_id", *self.file_columns]
                    )
                else:
                    click.secho("pgcopy not found, so it won't be as fast", fg="yellow")

    def after_import(self):
        with self.connection:
            if self.options.get("fast", False):
                click.echo(
                    f"-> Postgresql target, recreating indices: {', '.join(self.indices.keys())}…",
                    nl=False,
                )
                for statement in self.indices.values():
                    self.cursor.execute(statement)
                click.echo(" Done.")

                click.echo(
                    f"-> Postgresql target, recreating constraints…",
                    nl=False,
                )
                for statement in self.create_contraint_statements:
                    self.cursor.execute(statement)
                click.echo(" Done.")

    @cached_property
    def file_columns(self):
        self.cursor.execute(
            """SELECT column_name AS name FROM information_schema.columns
            WHERE table_name = 'files' and column_name not in ('id', 'torrent_id')"""
        )
        return [row[0] for row in self.cursor]

    @cached_property
    def torrent_columns(self):
        self.cursor.execute(
            """SELECT column_name AS name FROM information_schema.columns
            WHERE table_name = 'torrents' and column_name not in ('id')"""
        )
        return [row[0] for row in self.cursor]

    def merge_torrents(self, torrents: list[dict]) -> Dict[str, int]:
        torrents_statement = f"""INSERT INTO torrents ({','.join(self.torrent_columns)})
            VALUES %s ON CONFLICT DO NOTHING RETURNING id, info_hash"""
        # We will use execute_values with a single placeholder
        files_statement = f"""INSERT INTO files (torrent_id, {','.join(self.file_columns)})
            VALUES %s ON CONFLICT DO NOTHING"""
        try:
            result = psycopg2.extras.execute_values(
                self.cursor,
                torrents_statement,
                [
                    (*[torrent[column] for column in self.torrent_columns],)
                    for torrent in torrents
                ],
                fetch=True,
            )
        except ValueError as e:
            # psycopg2 raises ValueError for NUL (0x00) bytes in text parameters
            if "0x00" in str(e):
                return self.merge_torrents(self.fix_bytes(torrents, "name"))
            raise
        # ON CONFLICT DO NOTHING only returns rows that were actually inserted, so
        # map target ids back to their source torrents through the unique
        # info_hash instead of relying on positional order.
        target_id_by_hash = {
            bytes(info_hash): target_id for (target_id, info_hash) in result
        }
        self.merge_files(
            files_statement,
            {
                torrent["id"]: target_id_by_hash[bytes(torrent["info_hash"])]
                for torrent in torrents
                if bytes(torrent["info_hash"]) in target_id_by_hash
            },
        )
        total = len(torrents)
        inserted = len(result)
        return {"failed": total - inserted, "inserted": inserted, "processed": total}

    def merge_files(self, statement: str, torrent_ids: Dict[int, int]):
        """Copy the files of the newly inserted torrents.

        torrent_ids maps source torrent ids to the ids they received in the target.
        """
        if not torrent_ids:
            # Nothing was inserted in this batch (every torrent already existed)
            return
        if self.copy_manager is not None:
            try:
                self.cursor.execute("SAVEPOINT copy_files")
                self.copy_manager.threading_copy(self.get_source_files(torrent_ids))
            except psycopg2.DataError as error:
                if error.pgcode == psycopg2.errorcodes.CHARACTER_NOT_IN_REPERTOIRE:
                    self.cursor.execute("ROLLBACK TO copy_files")
                    self.copy_manager.threading_copy(
                        self.get_source_files(torrent_ids, True)
                    )
                else:
                    raise

            self.cursor.execute("RELEASE SAVEPOINT copy_files")
        else:
            # Slow path, but try to be as fast as possible
            files_cursor = self.get_source_files_cursor(tuple(torrent_ids.keys()))
            files_list = files_cursor.fetchmany()
            while files_list:
                try:
                    psycopg2.extras.execute_values(
                        self.cursor,
                        statement,
                        [
                            (
                                torrent_ids[merged_file["torrent_id"]],
                                *[merged_file[column] for column in self.file_columns],
                            )
                            for merged_file in files_list
                        ],
                    )
                    files_list = files_cursor.fetchmany()
                except ValueError as e:
                    # NUL bytes in a path: clean the batch and let the loop retry it
                    if "0x00" in str(e):
                        files_list = self.fix_bytes(files_list, "path")
                    else:
                        raise

    def get_source_files(self, torrent_ids: Dict[int, int], fix_nul=False):
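        """Yield (target_torrent_id, *file_columns) tuples for pgcopy's COPY."""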
        files_cursor = self.get_source_files_cursor(tuple(torrent_ids.keys()))
        files_list = files_cursor.fetchmany()
        while files_list:
            if fix_nul:
                files_list = self.fix_bytes(files_list, "path")
            for merged_file in files_list:
                yield (
                    torrent_ids[merged_file["torrent_id"]],
                    *[merged_file[column] for column in self.file_columns],
                )
            files_list = files_cursor.fetchmany()

    def get_source_files_cursor(self, torrent_ids: Tuple[int, ...], arraysize=1000):
        select_cursor = self.source.connection.cursor()
        select_cursor.arraysize = arraysize
        select_cursor.execute(
            f"""SELECT * FROM files WHERE torrent_id IN
            ({','.join([self.source.placeholder] * len(torrent_ids))})""",
            torrent_ids,
        )
        return select_cursor

    def fix_bytes(self, rows: List[dict], column: str):
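        """Return dict copies of rows with NUL bytes stripped from the given
        column (PostgreSQL rejects 0x00 in text values)."""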
        click.secho(
            f"Fix bytes value from {len(rows)} rows in column {column}", fg="yellow"
        )
        # Make a dict copy to be able to modify it
        rows = [dict(row) for row in rows]
        for row in rows:
            old = row[column]
            if isinstance(old, bytes):
                old = old.decode("utf8", errors="replace")
            row[column] = old.replace("\x00", "")
        return rows

    @property
    def placeholder(self):
        return "%s"


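# Example invocations (paths and DSN are illustrative):
#   ./magnetico_merge.py main.sqlite3 merged.sqlite3
#   ./magnetico_merge.py --fast postgresql://user@host/magneticod merged.sqlite3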
@click.command()
@click.option(
    "--fast",
    is_flag=True,
    help="Try to go faster, by deleting indices and removing WAL while importing. PostgreSQL only.",
)
@click.argument("main-db")
@click.argument("merged-db")
def main(main_db, merged_db, fast):
    click.echo(f"Merging {merged_db} into {main_db}")
    source = Database.from_dsn(merged_db)
    target = Database.from_dsn(main_db, source)
    target.set_options({"fast": fast})

    click.echo("-> Gathering source database statistics: ", nl=False)

    total_merged = source.torrents_count
    click.echo(f"{total_merged} torrents to merge.")
    failed_count = 0

    click.echo("-> Preparing target database")
    target.before_import()

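    # The whole merge runs in one transaction on the target: committed at the
    # end, rolled back on any error.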
    try:
        target.cursor.execute("BEGIN")
        arraysize = 1000
        with click.progressbar(length=total_merged, width=0, show_pos=True) as bar:
            torrents = source.get_torrents_cursor(arraysize)
            results = target.merge_torrents(torrents.fetchmany())
            while results["processed"] > 0:
                bar.update(results["processed"])
                failed_count += results["failed"]
                results = target.merge_torrents(torrents.fetchmany())

        click.echo("Comitting… ", nl=False)
        target.connection.commit()
        click.echo(
            f"OK. {total_merged} torrents processed. {failed_count} torrents were not merged due to errors."
        )
    except BaseException as e:
        click.secho(f"Error while importing {str(e)}", fg="red")
        target.connection.rollback()
        import traceback

        traceback.print_exc()
    finally:
        target.after_import()

    source.close()
    target.close()


if __name__ == "__main__":
    main()