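# asr.py -- main pipeline of the Whisper ASR worker: downloads the input file,
# transcodes it if needed, runs Whisper ASR, generates a DAAN transcript,
# saves provenance and optionally transfers the output to S3.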
import logging
import os
import time
from typing import Optional
from base_util import (
    get_asset_info,
    asr_output_dir,
    save_provenance,
    remove_all_input_output,
    PROVENANCE_JSON_FILE,
)
from config import (
    s3_endpoint_url,
    w_word_timestamps,
    w_device,
    w_model,
    w_beam_size,
    w_best_of,
    w_vad,
)
from download import download_uri
from whisper import run_asr, WHISPER_JSON_FILE
from s3_util import S3Store, parse_s3_uri
from transcode import try_transcode
from daan_transcript import generate_daan_transcript, DAAN_JSON_FILE

logger = logging.getLogger(__name__)

# TODO: Get commit hash and use it as version in prov
# (prev impl didn't work)
version = ""
# if os.path.exists("git_commit"):
#     with open("git_commit", "r") as f:
#         for line in f:
#             version = line.strip()
def run(input_uri: str, output_uri: str, model=None) -> Optional[str]:
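    """Run the full ASR pipeline for a single input file.

    Downloads the input, transcodes it if needed, runs Whisper ASR, generates
    a DAAN transcript, saves provenance and, if output_uri is set, transfers
    the output to S3. Returns None on success or an error string on failure.
    """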
logger.info(f"Processing {input_uri} (save to --> {output_uri})")
start_time = time.time()
prov_steps = [] # track provenance
# 1. download input
result = download_uri(input_uri)
logger.info(result)
if result.error != "":
logger.error("Could not obtain input, quitting...")
return result.error
prov_steps.append(result.provenance)
input_path = result.file_path
asset_id, extension = get_asset_info(input_path)
output_path = asr_output_dir(input_path)
# 2. check if the input file is suitable for processing any further
transcode_output = try_transcode(input_path, asset_id, extension)
if transcode_output.error != "":
logger.error(
"The transcode failed to yield a valid file to continue with, quitting..."
)
remove_all_input_output(output_path)
return transcode_output.error
else:
input_path = transcode_output.transcoded_file_path
prov_steps.append(transcode_output.provenance)
# 3. run ASR
if not asr_already_done(output_path):
logger.info("No Whisper transcript found")
whisper_prov_or_error = run_asr(input_path, output_path, model)
if isinstance(whisper_prov_or_error, dict):
prov_steps.append(whisper_prov_or_error)
else:
remove_all_input_output(output_path)
return whisper_prov_or_error
else:
logger.info(f"Whisper transcript already present in {output_path}")
provenance = {
"activity_name": "Whisper transcript already exists",
"activity_description": "",
"processing_time_ms": "",
"start_time_unix": "",
"parameters": [],
"software_version": "",
"input_data": "",
"output_data": "",
"steps": [],
}
prov_steps.append(provenance)
# 4. generate JSON transcript
if not daan_transcript_already_done(output_path):
logger.info("No DAAN transcript found")
daan_prov = generate_daan_transcript(output_path)
if daan_prov:
prov_steps.append(daan_prov)
else:
logger.error("Could not generate DAAN transcript")
remove_all_input_output(output_path)
return "DAAN Transcript failure: Could not generate DAAN transcript"
else:
logger.info(f"DAAN transcript already present in {output_path}")
provenance = {
"activity_name": "DAAN transcript already exists",
"activity_description": "",
"processing_time_ms": "",
"start_time_unix": "",
"parameters": [],
"software_version": "",
"input_data": "",
"output_data": "",
"steps": [],
}
prov_steps.append(provenance)
    processing_time_ms = (time.time() - start_time) * 1000
    final_prov = {
        "activity_name": "Whisper ASR Worker",
        "activity_description": "Worker that gets a video/audio file as input and outputs JSON transcripts in various formats",
        "processing_time_ms": processing_time_ms,
        "start_time_unix": start_time,
        "parameters": {
            "word_timestamps": w_word_timestamps,
            "device": w_device,
            "vad": w_vad,
            "model": w_model,
            "beam_size": w_beam_size,
            "best_of": w_best_of,
        },
        "software_version": version,
        "input_data": input_uri,
        "output_data": output_uri if output_uri else output_path,
        "steps": prov_steps,
    }

    prov_success = save_provenance(final_prov, output_path)
    if not prov_success:
        logger.error("Could not save the provenance")
        remove_all_input_output(output_path)
        return "Provenance failure: Could not save the provenance"

    # 5. transfer output
    if output_uri:
        success = transfer_asr_output(output_path, output_uri)
        if not success:
            logger.error("Could not upload output to S3")
            remove_all_input_output(output_path)
            return "Upload failure: Could not upload output to S3"
    else:
        logger.info("No output_uri specified, so all is done")

    remove_all_input_output(output_path)
    return None


# If an S3 output_uri is supplied, transfers the output data to that S3 location
def transfer_asr_output(output_path: str, output_uri: str) -> bool:
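    """Upload the Whisper, DAAN and provenance JSON files to the S3 destination.

    Returns True on success, False otherwise.
    """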
logger.info(f"Transferring {output_path} to S3 (destination={output_uri})")
if not s3_endpoint_url:
logger.warning("Transfer to S3 configured without an S3_ENDPOINT_URL!")
return False
s3_bucket, s3_folder_in_bucket = parse_s3_uri(output_uri)
s3 = S3Store(s3_endpoint_url)
return s3.transfer_to_s3(
s3_bucket,
s3_folder_in_bucket,
[
os.path.join(output_path, DAAN_JSON_FILE),
os.path.join(output_path, WHISPER_JSON_FILE),
os.path.join(output_path, PROVENANCE_JSON_FILE),
],
)
# check if there is a whisper-transcript.json
def asr_already_done(output_dir: str) -> bool:
    whisper_transcript = os.path.join(output_dir, WHISPER_JSON_FILE)
    logger.info(f"Checking existence of {whisper_transcript}")
    return os.path.exists(whisper_transcript)


# check if there is a daan-es-transcript.json
def daan_transcript_already_done(output_dir: str) -> bool:
    daan_transcript = os.path.join(output_dir, DAAN_JSON_FILE)
    logger.info(f"Checking existence of {daan_transcript}")
    return os.path.exists(daan_transcript)
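

# Minimal usage sketch: the URIs below are hypothetical placeholders and the
# logging setup is an assumption, not part of the worker's normal entry point.
if __name__ == "__main__":
    logging.basicConfig(level=logging.INFO)
    error = run(
        "s3://example-bucket/input/example.mp4",  # hypothetical input URI
        "s3://example-bucket/output/example",  # hypothetical output location
    )
    if error:
        logger.error(f"Pipeline failed: {error}")
    else:
        logger.info("Pipeline finished successfully")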