"""
BackupPC Clone
"""
|
4
|
|
|
import os |
|
5
|
|
|
import shutil |
|
6
|
|
|
|
|
7
|
|
|
from backuppc_clone.Config import Config |
|
8
|
|
|
from backuppc_clone.DataLayer import DataLayer |
|
9
|
|
|
from backuppc_clone.ProgressBar import ProgressBar |
|
10
|
|
|
from backuppc_clone.helper.BackupScanner import BackupScanner |
|
11
|
|
|
from backuppc_clone.misc import sizeof_fmt |
|
12
|
|
|
from backuppc_clone.style.BackupPcCloneStyle import BackupPcCloneStyle |
|
13
|
|
|
|
|
14
|
|
|
|
|
15
|
|
View Code Duplication |
class BackupClone:
    """
    Clones a backup of a host.
    """

    # ------------------------------------------------------------------------------------------------------------------
    def __init__(self, io: BackupPcCloneStyle):
        """
        Object constructor.

        :param BackupPcCloneStyle io: The output style.
        """

        self.__io: BackupPcCloneStyle = io
        """
        The output style.
        """

        self.__host: str = ''
        """
        The host of the backup.
        """

        self.__backup_no: int = 0
        """
        The number of the backup.
        """

    # ------------------------------------------------------------------------------------------------------------------
    def __scan_host_backup(self, csv_filename: str) -> None:
        """
        Scans the backup of a host and writes the found entries to a CSV file.

        :param str csv_filename: The name of the CSV file.
        """
        self.__io.section('Original backup')

        scanner = BackupScanner(self.__io)
        scanner.scan_directory(self.__host, self.__backup_no, csv_filename)

        self.__io.writeln('')
        self.__io.writeln(' Files found: {}'.format(scanner.file_count))
        self.__io.writeln(' Directories found: {}'.format(scanner.dir_count))
        self.__io.writeln('')

    # ------------------------------------------------------------------------------------------------------------------
    def __import_host_scan_csv(self, csv_filename: str) -> None:
        """
        Imports the CSV file with the entries of the host backup into the SQLite database.

        :param str csv_filename: The name of the CSV file.
        """
        self.__io.log_very_verbose(' Importing <fso>{}</fso>'.format(csv_filename))

        hst_id = DataLayer.instance.get_host_id(self.__host)
        bck_id = DataLayer.instance.get_bck_id(hst_id, int(self.__backup_no))

        # Remove any rows left behind by a previous (possibly aborted) import of this backup.
        DataLayer.instance.backup_empty(bck_id)
        DataLayer.instance.import_csv('BKC_BACKUP_TREE',
                                      ['bbt_seq', 'bbt_inode_original', 'bbt_dir', 'bbt_name'],
                                      csv_filename,
                                      False,
                                      {'bck_id': bck_id})

    # ------------------------------------------------------------------------------------------------------------------
    def __import_pre_scan_csv(self, csv_filename: str) -> None:
        """
        Imports the pre-scan CSV file with the entries of the host backup into the SQLite database.

        :param str csv_filename: The name of the CSV file.
        """
        self.__io.section('Using pre-scan')

        self.__import_host_scan_csv(csv_filename)

        hst_id = DataLayer.instance.get_host_id(self.__host)
        bck_id = DataLayer.instance.get_bck_id(hst_id, int(self.__backup_no))

        stats = DataLayer.instance.backup_get_stats(bck_id)

        self.__io.writeln(' Files found: {}'.format(stats['#files']))
        self.__io.writeln(' Directories found: {}'.format(stats['#dirs']))
        self.__io.writeln('')

    # ------------------------------------------------------------------------------------------------------------------
    def __copy_pool_file(self, dir_name: str, file_name: str, bpl_inode_original: int) -> int:
        """
        Copies a pool file from the original pool to the clone pool. Returns the size of the file.

        :param str dir_name: The directory name relative to the top dir.
        :param str file_name: The file name.
        :param int bpl_inode_original: The inode of the original pool file.

        :rtype: int

        :raise FileNotFoundError: When the actual inode of the original file does not match bpl_inode_original.
        """
        original_path = os.path.join(Config.instance.top_dir_original, dir_name, file_name)
        clone_dir = os.path.join(Config.instance.top_dir_clone, dir_name)
        clone_path = os.path.join(clone_dir, file_name)

        self.__io.log_very_verbose('Copying <fso>{}</fso> to <fso>{}</fso>'.format(original_path, clone_dir))

        stats_original = os.stat(original_path)
        # BackupPC 3.x renames pool files with hash collisions.
        if stats_original.st_ino != bpl_inode_original:
            raise FileNotFoundError("Filename '{}' and inode {} do not match".format(original_path, bpl_inode_original))

        # exist_ok avoids a race between testing for and creating the directory.
        os.makedirs(clone_dir, exist_ok=True)

        shutil.copyfile(original_path, clone_path)

        # Preserve the mode and modification time of the original pool file on the clone.
        stats_clone = os.stat(clone_path)
        os.chmod(clone_path, stats_original.st_mode)
        os.utime(clone_path, (stats_original.st_mtime, stats_original.st_mtime))

        DataLayer.instance.pool_update_by_inode_original(stats_original.st_ino,
                                                         stats_clone.st_ino,
                                                         stats_original.st_size,
                                                         stats_original.st_mtime)

        return stats_original.st_size

    # ------------------------------------------------------------------------------------------------------------------
    def __update_clone_pool(self) -> None:
        """
        Copies required pool files from the original pool to the clone pool.
        """
        self.__io.section('Clone pool')
        self.__io.writeln(' Adding files ...')
        self.__io.writeln('')

        hst_id = DataLayer.instance.get_host_id(self.__host)
        bck_id = DataLayer.instance.get_bck_id(hst_id, int(self.__backup_no))

        # The total number of required pool files, used to size the progress bar.
        total_count = DataLayer.instance.backup_prepare_required_clone_pool_files(bck_id)
        progress = ProgressBar(self.__io.output, total_count)

        total_size = 0
        file_count = 0
        for rows in DataLayer.instance.backup_yield_required_clone_pool_files():
            for row in rows:
                total_size += self.__copy_pool_file(row['bpl_dir'], row['bpl_name'], row['bpl_inode_original'])
                file_count += 1
                progress.advance()

        progress.finish()

        self.__io.writeln('')
        self.__io.writeln(' Number of files copied: {}'.format(file_count))
        self.__io.writeln(' Total bytes copied : {} ({}B) '.format(sizeof_fmt(total_size), total_size))
        self.__io.writeln('')

    # ------------------------------------------------------------------------------------------------------------------
    def __clone_backup(self) -> None:
        """
        Clones the backup: populates the clone backup directory with directories, hardlinks to the clone pool, and
        copies of files that are not linked to the pool.
        """
        self.__io.section('Clone backup')
        self.__io.writeln(' Populating ...')
        self.__io.writeln('')

        hst_id = DataLayer.instance.get_host_id(self.__host)
        bck_id = DataLayer.instance.get_bck_id(hst_id, int(self.__backup_no))
        DataLayer.instance.backup_set_in_progress(bck_id, 1)

        # Start from an empty clone backup directory.
        backup_dir_clone = Config.instance.backup_dir_clone(self.__host, self.__backup_no)
        if os.path.exists(backup_dir_clone):
            shutil.rmtree(backup_dir_clone)
        os.makedirs(backup_dir_clone)

        backup_dir_original = Config.instance.backup_dir_original(self.__host, self.__backup_no)
        top_dir_clone = Config.instance.top_dir_clone

        file_total = DataLayer.instance.backup_prepare_tree(bck_id)
        progress = ProgressBar(self.__io.output, file_total)

        file_count = 0
        link_count = 0
        dir_count = 0
        for rows in DataLayer.instance.backup_yield_tree():
            for row in rows:
                if row['bbt_dir'] is None:
                    row['bbt_dir'] = ''

                target_clone = os.path.join(backup_dir_clone, row['bbt_dir'], row['bbt_name'])

                if row['bpl_inode_original']:
                    # Entry is a file linked to the pool.
                    source_clone = os.path.join(top_dir_clone, row['bpl_dir'], row['bpl_name'])
                    self.__io.log_very_verbose(
                        'Linking to <fso>{}</fso> from <fso>{}</fso>'.format(source_clone, target_clone))
                    os.link(source_clone, target_clone)
                    link_count += 1

                elif row['bbt_inode_original']:
                    # Entry is a file not linked to the pool.
                    source_original = os.path.join(backup_dir_original, row['bbt_dir'], row['bbt_name'])
                    self.__io.log_very_verbose('Copying <fso>{}</fso> to <fso>{}</fso>'.format(source_original,
                                                                                               target_clone))
                    shutil.copy2(source_original, target_clone)
                    file_count += 1
                else:
                    # Entry is a directory
                    os.mkdir(target_clone)
                    dir_count += 1

                progress.advance()

        progress.finish()

        DataLayer.instance.backup_set_in_progress(bck_id, 0)

        self.__io.writeln('')
        self.__io.writeln(' Number of files copied : {}'.format(file_count))
        self.__io.writeln(' Number of hardlinks created : {}'.format(link_count))
        self.__io.writeln(' Number of directories created: {}'.format(dir_count))
        self.__io.writeln('')

    # ------------------------------------------------------------------------------------------------------------------
    def clone_backup(self, host: str, backup_no: int) -> None:
        """
        Clones a backup of a host.

        :param str host: The host of the backup.
        :param int backup_no: The number of the backup.
        """
        self.__host = host
        self.__backup_no = backup_no

        # Prefer a pre-scan CSV shipped with the original backup; otherwise scan the backup ourselves.
        backup_dir_original = Config.instance.backup_dir_original(host, backup_no)
        pre_scan_csv_filename = os.path.join(backup_dir_original, 'backuppc-clone.csv')
        if os.path.isfile(pre_scan_csv_filename):
            self.__import_pre_scan_csv(pre_scan_csv_filename)
        else:
            csv_filename = os.path.join(Config.instance.tmp_dir_clone, 'backup-{}-{}.csv'.format(host, backup_no))
            self.__scan_host_backup(csv_filename)
            self.__import_host_scan_csv(csv_filename)

        self.__update_clone_pool()
        self.__clone_backup()
|
252
|
|
|
|
|
253
|
|
|
# ---------------------------------------------------------------------------------------------------------------------- |
|
254
|
|
|
|