Datasets: Upload 4 files

Browse files:
- TCUPY.py                       +955 -0
- TMIDIX.py                      +290 -17
- TPLOTS.py                      +168 -15
- monster_search_and_filter.py  +2002 -0

TCUPY.py
ADDED
@@ -0,0 +1,955 @@
#! /usr/bin/python3

r'''############################################################################
################################################################################
#
#
# Tegridy Cupy Python Module (TCUPY)
# Version 1.0
#
# Project Los Angeles
#
# Tegridy Code 2025
#
# https://github.com/asigalov61/tegridy-tools
#
#
################################################################################
#
# Copyright 2024 Project Los Angeles / Tegridy Code
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
################################################################################
################################################################################
#
# Critical dependencies
#
# !pip install cupy-cuda12x
# !pip install numpy==1.24.4
#
################################################################################
'''

################################################################################

import cupy as cp
import numpy as np

import tqdm # progress bars (used by the BPE and n-gram mapping routines below)

from collections import defaultdict, deque
from typing import Optional, Tuple, Dict, Any, List

import TMIDIX # companion tegridy-tools module (used by autoregressive_generate)

################################################################################

# Constants
MEMORY_LEN = 12 # Autoregressive context length
SEQUENCE_LENGTH = 32 # Generated sequence length (in triplets)

# Baseline penalty values:
REPETITION_PENALTY = (1.0, 1.0, 1.0) # base repetition penalty per element
SPIKE_PENALTY_STRENGTH = (1.0, 1.0, 1.0) # base spike penalty strength per element
SPIKE_SIGMA = (1.0, 1.0, 1.0) # baseline sigma value per element (minimum allowed)

###################################################################################

def find_numpy_array(src_array, trg_array):

    """
    Finds the indices of all rows of a 2D numpy array that match a 1D numpy array.
    """

    match_mask = np.all(src_array == trg_array, axis=1)

    return np.where(match_mask)[0]

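# Illustrative usage sketch (toy values assumed; not part of the module API):
# locate every row of a 2D array that exactly equals a target 1D row.

if __name__ == '__main__':

    src = np.array([[60, 4], [62, 7], [60, 4]])
    trg = np.array([60, 4])

    print(find_numpy_array(src, trg)) # prints [0 2]: rows 0 and 2 equal trg
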
###################################################################################

def vertical_list_search(src_list, trg_list):

    """
    For each vertical window of consecutive rows of height len(trg_list) in src_list,
    this function checks whether for every offset j (0 <= j < len(trg_list)) the row
    at index (window_start + j) contains trg_list[j].

    It returns a list of windows (each a list of consecutive row indices) that meet this condition.
    """

    if not src_list or not trg_list:
        return []

    n = len(src_list)
    k = len(trg_list)

    num_windows = n - k + 1

    if num_windows <= 0:
        return []

    # Determine the maximum row length.
    max_len = max(len(row) for row in src_list)

    # Determine a fill value guaranteed to be less than any valid value.
    global_min = min(min(row) for row in src_list if row)
    fill_value = global_min - 1

    # Build a padded 2D array A (shape n x max_len) from src_list.
    A = np.full((n, max_len), fill_value, dtype=np.int64)
    for i, row in enumerate(src_list):
        L = len(row)
        A[i, :L] = row

    # For each unique target in trg_list, compute a Boolean vector of length n.
    # present[t][i] will be True if A[i, :] contains t, else False.
    unique_targets = set(trg_list)

    present_dict = {}

    for t in unique_targets:
        # Compute along axis=1 so that for each row we see if any element equals t.
        present_dict[t] = np.any(A == t, axis=1)

    # Build a Boolean array B of shape (k, num_windows) where for each offset j,
    # B[j, s] = present_dict[ trg_list[j] ][s + j] for each window starting index s.
    B = np.empty((k, num_windows), dtype=bool)

    for j in range(k):
        t = trg_list[j]
        # For a vertical window starting at s, row s+j should contain t.
        B[j, :] = present_dict[t][j: j + num_windows]

    # A window is valid if all k rows in that window contain the required target.
    valid_windows_mask = np.all(B, axis=0)
    valid_starts = np.nonzero(valid_windows_mask)[0]

    # Create output windows (each as a list of consecutive row indices).
    result = [list(range(s, s + k)) for s in valid_starts]

    return result

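# Illustrative usage sketch (toy values assumed): find windows of consecutive
# rows where row s contains trg_list[0], row s+1 contains trg_list[1], etc.

if __name__ == '__main__':

    rows = [[1, 2], [2, 3, 4], [4, 5], [1, 4]]

    print(vertical_list_search(rows, [2, 4])) # prints [[0, 1], [1, 2]]
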
###################################################################################

def pack_sequences(train_data, pad_val=-1):
    """
    Packs a list of variable-length token sequences into a 2D CuPy array.

    This version computes lengths and builds the padded array and mask entirely on GPU.
    It converts each sequence into a CuPy array, concatenates them, and assigns tokens in one shot.

    Returns:
        batch: a CuPy array of shape (n, max_len)
        lengths: a CuPy array of shape (n,) containing each sequence's length.
    """
    n = len(train_data)
    # Compute lengths of each sequence and convert to a CuPy array.
    lengths = cp.array([len(seq) for seq in train_data], dtype=cp.int64)
    max_len_val = int(cp.max(lengths).get())
    # Allocate the padded 2D array filled with pad_val.
    batch = cp.full((n, max_len_val), pad_val, dtype=cp.int64)
    # Create a boolean mask: for each row, positions less than the sequence length are valid.
    mask = cp.arange(max_len_val).reshape(1, max_len_val) < lengths.reshape(n, 1)
    # Convert each sequence to a CuPy array and concatenate them.
    sequences = [cp.array(seq, dtype=cp.int64) for seq in train_data]
    flat = cp.concatenate(sequences)
    # Fill in the valid positions.
    batch[mask] = flat
    return batch, lengths

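# Illustrative usage sketch (requires CuPy with a CUDA GPU; toy values assumed):

if __name__ == '__main__':

    batch, lengths = pack_sequences([[7, 8, 9], [1, 2], [5]])

    print(batch)   # [[ 7  8  9] [ 1  2 -1] [ 5 -1 -1]]
    print(lengths) # [3 2 1]
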
###################################################################################

def count_best_pair_gpu(batch, lengths, factor, pad_val=-1):
    """
    Given the entire GPU-resident packed data, compute the most frequent
    adjacent pair (encoded as: pair_val = first * factor + second) on GPU.
    """
    n, L = batch.shape
    cols = cp.arange(L - 1, dtype=cp.int64)
    cols_expanded = cp.broadcast_to(cols, (n, L - 1))
    valid_mask = cols_expanded < cp.reshape(lengths, (n, 1)) - 1

    first_tokens = batch[:, :L - 1]
    second_tokens = batch[:, 1:L]
    valid_first = first_tokens[valid_mask]
    valid_second = second_tokens[valid_mask]

    pairs = valid_first * factor + valid_second
    if pairs.size == 0:
        return None

    sorted_pairs = cp.sort(pairs)
    diff = cp.diff(sorted_pairs)
    boundaries = cp.nonzero(diff)[0] + 1
    group_starts = cp.concatenate([cp.array([0], dtype=cp.int64), boundaries])
    group_ends = cp.concatenate([boundaries, cp.array([sorted_pairs.size], dtype=cp.int64)])
    group_counts = group_ends - group_starts

    max_idx = int(cp.argmax(group_counts))
    best_pair_enc = int(sorted_pairs[group_starts[max_idx]])
    best_freq = int(group_counts[max_idx])
    first = best_pair_enc // factor
    second = best_pair_enc % factor
    return (first, second, best_freq)

###################################################################################

merge_kernel_code = r'''
extern "C" __global__
void merge_pair_kernel(const long* input, long* output,
                       const long* input_lengths, long* output_lengths,
                       const long num_rows, const long num_cols,
                       const long a, const long b, const long new_token,
                       const long pad_val) {
    int row = blockIdx.x * blockDim.x + threadIdx.x;
    if (row >= num_rows) return;
    long in_length = input_lengths[row];
    long out_idx = 0;
    bool skip_next = false;
    for (long i = 0; i < in_length; i++) {
        if (skip_next) {
            skip_next = false;
            continue;
        }
        long token = input[row * num_cols + i];
        if (i < in_length - 1 && token == a && input[row * num_cols + i + 1] == b) {
            output[row * num_cols + out_idx] = new_token;
            out_idx++;
            skip_next = true;
        } else {
            output[row * num_cols + out_idx] = token;
            out_idx++;
        }
    }
    output_lengths[row] = out_idx;
    for (long j = out_idx; j < num_cols; j++) {
        output[row * num_cols + j] = pad_val;
    }
}
'''
merge_kernel = cp.RawKernel(merge_kernel_code, 'merge_pair_kernel')

###################################################################################

def learn_bpe_codes_gpu(train_data, vocab_size=4096, max_merges=None, pad_val=-1):
    """
    Learn BPE merge rules completely on GPU.

    The training data is packed once (using the vectorized pack_sequences).
    On each merge iteration, the best adjacent pair is computed on GPU and then merged
    into a new token via a custom merge kernel (with double-buffering).

    Returns:
        codes: a list of merge rules as ((first, second), new_token)
        final_data: the merged training data (list of sequences)
    """
    # Pack the entire dataset onto GPU.
    batch, lengths = pack_sequences(train_data, pad_val)
    n, L = batch.shape

    # Initialize vocabulary and the next available token.
    initial_vocab = {token for seq in train_data for token in seq}
    next_token = max(initial_vocab) + 1
    codes = []
    merge_count = 0
    pbar = tqdm.tqdm(total=max_merges if max_merges is not None else None,
                     desc="Learning BPE Codes (GPU)", leave=True)

    # Preallocate buffers for double-buffering.
    work_batch = cp.empty_like(batch)
    work_lengths = cp.empty_like(lengths)
    input_batch = batch
    input_lengths = lengths

    threads_per_block = 128
    blocks = (n + threads_per_block - 1) // threads_per_block

    while next_token < vocab_size and (max_merges is None or merge_count < max_merges):
        # Early stop if all sequences have collapsed (checked on GPU).
        if bool(cp.all(input_lengths == 1)):
            pbar.write("All sequences have collapsed; stopping early.")
            break

        factor = next_token # by construction, every token is < next_token
        best = count_best_pair_gpu(input_batch, input_lengths, factor, pad_val)
        if best is None:
            pbar.write("No mergeable pairs found; stopping early.")
            break

        best_pair = (best[0], best[1])
        best_freq = best[2]
        if best_freq < 2:
            pbar.write("Best pair frequency is less than 2; stopping early.")
            break

        codes.append((best_pair, next_token))

        # Launch the merge kernel.
        merge_kernel((blocks,), (threads_per_block,),
                     (input_batch,
                      work_batch,
                      input_lengths,
                      work_lengths,
                      cp.int64(n),
                      cp.int64(L),
                      cp.int64(best_pair[0]),
                      cp.int64(best_pair[1]),
                      cp.int64(next_token),
                      cp.int64(pad_val)))
        # Swap buffers for double-buffering.
        input_batch, work_batch = work_batch, input_batch
        input_lengths, work_lengths = work_lengths, input_lengths

        next_token += 1
        merge_count += 1
        pbar.update(1)
    pbar.close()

    final_batch = cp.asnumpy(input_batch)
    final_lengths = cp.asnumpy(input_lengths)
    final_data = [final_batch[i, :final_lengths[i]].tolist() for i in range(n)]
    return codes, final_data

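# Illustrative usage sketch (requires CuPy with a CUDA GPU; toy corpus assumed).
# Frequent adjacent pairs are merged into new tokens until vocab_size is reached
# or no pair occurs at least twice.

if __name__ == '__main__':

    train = [[1, 2, 3, 1, 2, 3], [1, 2, 1, 2, 4], [3, 1, 2, 3]]

    codes, merged = learn_bpe_codes_gpu(train, vocab_size=16)

    print(codes)  # learned merge rules, e.g. [((1, 2), 5), ...]
    print(merged) # the training data with all merges applied
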
###################################################################################

fused_merge_kernel_code = r'''
extern "C" __global__
void fused_merge_kernel(long* data_in, long* data_out, long* lengths, const long pad_val,
                        const long num_rows, const long max_len, const long num_merges, const long* merge_rules) {
    int row = blockIdx.x * blockDim.x + threadIdx.x;
    if (row >= num_rows) return;
    long base = row * max_len;
    long cur_len = lengths[row];
    long* cur = data_in + base;
    long* other = data_out + base;
    // Process each merge rule sequentially.
    for (int m = 0; m < num_merges; m++) {
        long a = merge_rules[3 * m];
        long b = merge_rules[3 * m + 1];
        long new_token = merge_rules[3 * m + 2];
        long out_idx = 0;
        for (int i = 0; i < cur_len; i++) {
            if (i < cur_len - 1 && cur[i] == a && cur[i+1] == b) {
                other[out_idx] = new_token;
                out_idx++;
                i++; // Skip the next token.
            } else {
                other[out_idx] = cur[i];
                out_idx++;
            }
        }
        cur_len = out_idx;
        // Swap pointers for the next merge.
        long* temp = cur;
        cur = other;
        other = temp;
    }
    lengths[row] = cur_len;
    // Pad the remaining positions with pad_val.
    for (int i = cur_len; i < max_len; i++) {
        cur[i] = pad_val;
    }
    // If the final result is not in data_in, copy back.
    if (cur != data_in + base) {
        for (int i = 0; i < cur_len; i++) {
            data_in[base + i] = cur[i];
        }
    }
}
'''
fused_kernel = cp.RawKernel(fused_merge_kernel_code, 'fused_merge_kernel')

###################################################################################

def retokenize_train_data_fused_gpu(train_data, codes, pad_val=-1):
    """
    Retokenize training data using the fully fused GPU kernel.

    The entire training dataset is first packed into GPU memory (using pack_sequences).
    All learned merge rules (provided in 'codes') are applied via a single kernel launch.
    Each GPU thread processes one sequence by applying all merge rules sequentially.

    Returns:
        tokenized_data: list of retokenized sequences.
    """
    # Pack the data.
    batch, lengths = pack_sequences(train_data, pad_val)
    n, max_len = batch.shape
    # Build a flattened merge_rules array using CuPy.
    if len(codes) > 0:
        merge_rules_list = [[rule[0][0], rule[0][1], rule[1]] for rule in codes]
        merge_rules_gpu = cp.array(merge_rules_list, dtype=cp.int64)
        merge_rules_gpu = merge_rules_gpu.reshape(-1)
    else:
        merge_rules_gpu = cp.empty((0,), dtype=cp.int64)
    num_merges = merge_rules_gpu.shape[0] // 3
    # Preallocate a scratch buffer.
    scratch = cp.empty_like(batch)
    threads_per_block = 128
    blocks = (n + threads_per_block - 1) // threads_per_block
    # Launch the fused kernel.
    fused_kernel((blocks,), (threads_per_block,),
                 (batch, scratch, lengths, cp.int64(pad_val),
                  cp.int64(n), cp.int64(max_len), cp.int64(num_merges), merge_rules_gpu))
    final_batch = cp.asnumpy(batch)
    final_lengths = cp.asnumpy(lengths)
    tokenized_data = [final_batch[i, :final_lengths[i]].tolist() for i in range(n)]
    return tokenized_data

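# Illustrative usage sketch (requires CuPy with a CUDA GPU; toy rules assumed):
# apply previously learned merge rules to fresh data in one kernel launch.

if __name__ == '__main__':

    rules = [((1, 2), 5), ((5, 3), 6)]

    print(retokenize_train_data_fused_gpu([[1, 2, 3, 4], [1, 2, 1, 2]], rules))
    # prints [[6, 4], [5, 5]]
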
###################################################################################

def bpe_encode(seq, codes):
    """
    Iteratively encodes a sequence using BPE merge rules provided in a dictionary.

    Args:
        seq (list): A list of tokens (e.g. integers) representing the input sequence.
        codes (dict): A dictionary mapping token pairs (a tuple of two tokens)
                      to a merged token. For example:
                          { (1, 2): 100, (100, 3): 101 }

    Returns:
        list: The encoded sequence after applying all possible merges.

    The function repeatedly scans the entire sequence from left to right;
    whenever it finds a contiguous token pair that exists as a key in the
    codes dict, it replaces that pair with the merged token. This pass is
    repeated until no more merges are possible.
    """

    if isinstance(codes, list):
        codes = dict(codes)

    encoded_seq = seq.copy() # work on a copy so as not to modify the original
    done = False
    while not done:
        new_seq = []
        i = 0
        changed = False
        while i < len(encoded_seq):
            # If a merge is possible, merge the two tokens.
            if i < len(encoded_seq) - 1 and (encoded_seq[i], encoded_seq[i + 1]) in codes:
                new_seq.append(codes[(encoded_seq[i], encoded_seq[i + 1])])
                i += 2 # Skip the next token as it was merged.
                changed = True
            else:
                new_seq.append(encoded_seq[i])
                i += 1
        # If no merges occurred in this pass, exit the loop.
        if not changed:
            done = True
        encoded_seq = new_seq
    return encoded_seq

###################################################################################

def bpe_decode(seq, codes):
    """
    Decodes a sequence encoded with BPE merge rules defined in a codes dictionary.

    Args:
        seq (list): The encoded sequence (a list of tokens).
        codes (dict): A dictionary mapping token pairs to the merged token, used during encoding.

    Returns:
        list: The fully decoded sequence, with all merged tokens recursively expanded.

    The function constructs a reverse mapping that converts a merged token back into
    its constituent pair. Each token in the sequence is then recursively expanded.
    """

    if isinstance(codes, list):
        codes = dict(codes)

    # Build the reverse mapping: key = merged token, value = tuple (original token pair)
    reverse_mapping = {merged: pair for pair, merged in codes.items()}

    def recursive_expand(token):
        # If the token is a merged token, expand it recursively.
        if token in reverse_mapping:
            a, b = reverse_mapping[token]
            return recursive_expand(a) + recursive_expand(b)
        else:
            return [token]

    decoded_seq = []
    for token in seq:
        decoded_seq.extend(recursive_expand(token))
    return decoded_seq

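# Illustrative round-trip sketch (CPU-only; toy rules assumed):
# bpe_decode inverts bpe_encode for any learned rule set.

if __name__ == '__main__':

    rules = [((1, 2), 5), ((5, 3), 6)]

    enc = bpe_encode([1, 2, 3, 1, 2], rules)
    print(enc)                    # prints [6, 5]
    print(bpe_decode(enc, rules)) # prints [1, 2, 3, 1, 2]
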
###################################################################################

def ensure_triplet(val: Any, name: str = "") -> Tuple[float, float, float]:
    """
    Ensure the given parameter is returned as a triplet.
    If provided as a scalar, promote it to a triplet.
    """
    if np.isscalar(val):
        return (float(val), float(val), float(val))
    elif isinstance(val, (list, tuple)) and len(val) == 3:
        return tuple(float(x) for x in val)
    else:
        raise ValueError(f"{name} must be a scalar or a sequence of 3 numbers.")

###################################################################################

REP_PENALTY = ensure_triplet(REPETITION_PENALTY, "REPETITION_PENALTY")
SPIKE_STRENGTH = ensure_triplet(SPIKE_PENALTY_STRENGTH, "SPIKE_PENALTY_STRENGTH")
SPIKE_SIG = ensure_triplet(SPIKE_SIGMA, "SPIKE_SIGMA")

###################################################################################

def sliding_window_view_alternative(a: np.ndarray, window_length: int) -> np.ndarray:
    """
    Create a sliding-window view (without copying) of an array.
    Expected input shape: (n, L, d) and returns: (n, L - window_length + 1, window_length, d)
    """
    n, L, d = a.shape
    new_shape = (n, L - window_length + 1, window_length, d)
    new_strides = (a.strides[0], a.strides[1], a.strides[1], a.strides[2])
    return np.lib.stride_tricks.as_strided(a, shape=new_shape, strides=new_strides)

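# Illustrative shape check (toy array assumed): a zero-copy view of all
# length-4 windows over the middle axis.

if __name__ == '__main__':

    a = np.zeros((2, 10, 3))

    print(sliding_window_view_alternative(a, 4).shape) # prints (2, 7, 4, 3)
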
###################################################################################

def build_ngram_mapping(data: np.ndarray, memory_len: int) -> Dict[Any, Dict[Any, int]]:
    """
    Build an n-gram mapping from a context (a sequence of triplets) to candidate triplets with frequencies.
    """
    n, L, d = data.shape
    window_length = memory_len + 1 # context (memory) + candidate
    windows = sliding_window_view_alternative(data, window_length)
    # windows shape: (n, L - window_length + 1, window_length, d)

    # Split windows into context (first memory_len triplets) and candidates (last triplet)
    contexts = windows[:, :, :memory_len, :] # shape: (n, num_windows, memory_len, d)
    candidates = windows[:, :, memory_len, :] # shape: (n, num_windows, d)

    # Flatten the batch and window dimensions.
    contexts_flat = contexts.reshape(-1, memory_len, d)
    candidates_flat = candidates.reshape(-1, d)

    mapping = defaultdict(lambda: defaultdict(int))
    total_windows = contexts_flat.shape[0]
    for context_arr, candidate_arr in tqdm.tqdm(
            zip(contexts_flat, candidates_flat),
            total=total_windows,
            desc="Building n-gram mapping"):
        context_key = tuple(map(tuple, context_arr)) # use a tuple of triplets as the key
        candidate_val = tuple(candidate_arr)
        mapping[context_key][candidate_val] += 1

    return {context: dict(candidates) for context, candidates in mapping.items()}

###################################################################################

def precompute_mapping_lookup(mapping: Dict[Any, Dict[Any, int]]) -> Dict[Any, Tuple[Tuple[Any, ...], np.ndarray]]:
    """
    Converts the mapping into a lookup table: context -> (tuple(candidates), frequencies_array).
    """
    mapping_lookup = {}
    for context, candidate_dict in tqdm.tqdm(mapping.items(), desc="Precomputing lookup"):
        candidates = tuple(candidate_dict.keys())
        frequencies = np.array(list(candidate_dict.values()), dtype=np.float64)
        mapping_lookup[context] = (candidates, frequencies)
    return mapping_lookup

###################################################################################

def build_training_sequences_set(data: np.ndarray) -> set:
    """
    Build a set of training sequences (each as a tuple of triplets) for uniqueness checking.
    """
    return {tuple(map(tuple, seq)) for seq in data}

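# Illustrative pipeline sketch (toy data assumed): build the n-gram mapping,
# convert it to a fast lookup table, and collect the training set used for
# uniqueness checks. A short memory_len is used here instead of MEMORY_LEN.

if __name__ == '__main__':

    rng = np.random.default_rng(0)
    data = rng.integers(0, 8, size=(4, 16, 3)) # 4 sequences of 16 triplets

    mapping = build_ngram_mapping(data, memory_len=2)
    lookup = precompute_mapping_lookup(mapping)
    train_set = build_training_sequences_set(data)

    print(len(lookup), 'contexts')
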
###################################################################################

def generate_sequence_optimized(mapping_lookup: Dict[Any, Tuple[Tuple[Any, ...], np.ndarray]],
                                training_set: set,
                                memory_len: int,
                                sequence_length: int = 24,
                                max_attempts: int = 1000) -> Optional[Tuple[Tuple[float, float, float], ...]]:
    """
    Autoregressively generate a new, unique sequence using the precomputed mapping lookup.
    The invariant maintained is: the second element of one triplet is never greater than the first element
    of the following triplet.

    Two dynamic adjustments are applied for candidate selection:

    1. **Dynamic Repetition Penalty:**
       For each candidate, count the occurrences of each element in the generated sequence.
       Rather than a fixed penalty, this repetition penalty scales with the ratio
       (current_length / sequence_length). In log-space, it subtracts:
           (current_length / sequence_length) * sum_k(count[k] * log(REP_PENALTY[k]))
    2. **Dynamic Spike (Variance) Penalty:**
       For each candidate, compute the squared difference from the running average for each element.
       Use a dynamic sigma that is the maximum between the running standard deviation and the baseline.
       The penalty term for each element is:
           SPIKE_STRENGTH[k] * ((cand[k] - running_avg[k])^2) / (2 * dynamic_sigma[k]^2)
       The overall spike penalty is the sum of the three terms and is subtracted from the candidate's log frequency.

    The resulting candidate log score is computed as:
        log(candidate_frequency) - rep_penalty_component - spike_penalty_component
    A numerically stable softmax is then applied over these scores to determine the probability for drawing a candidate.

    If no candidate passing the invariant is found, the attempt is aborted.

    Parameters:
        mapping_lookup: Precomputed lookup mapping (context -> (candidates, frequencies)).
        training_set: Set of training sequences to ensure uniqueness.
        memory_len: Number of triplets used as context.
        sequence_length: Desired length of the generated sequence.
        max_attempts: Maximum number of generation attempts.

    Returns:
        A new unique sequence (tuple of triplets) that respects the invariant, or None if not found.
    """
    mapping_keys = list(mapping_lookup.keys())
    num_keys = len(mapping_keys)

    for attempt in range(max_attempts):
        # Select a seed context randomly (from training data so that the invariant holds).
        seed = mapping_keys[np.random.randint(0, num_keys)]
        generated_sequence: List[Tuple[float, float, float]] = list(seed)
        valid_generation = True

        while len(generated_sequence) < sequence_length:
            last_triplet = generated_sequence[-1]
            current_context = tuple(generated_sequence[-memory_len:]) # context as tuple of triplets
            candidate_found = False

            if current_context in mapping_lookup:
                candidates, frequencies = mapping_lookup[current_context]
                # Filter candidates by invariant:
                # Candidate's first element must be >= last triplet's second element.
                valid_indices = [i for i, cand in enumerate(candidates) if cand[0] >= last_triplet[1]]
                if valid_indices:
                    # Filter candidates and their associated frequencies.
                    filtered_freqs = frequencies[valid_indices]
                    filtered_candidates = [candidates[i] for i in valid_indices]

                    # Convert candidates into a NumPy array for vectorized operations.
                    candidate_array = np.array(filtered_candidates, dtype=np.float64) # shape: (n_candidates, 3)

                    # Prepare generation history as array.
                    generated_array = np.array(generated_sequence, dtype=np.float64) # shape: (T, 3)
                    current_length = generated_array.shape[0]

                    # Running average and standard deviation for dynamic spike adjustment.
                    running_avg = np.mean(generated_array, axis=0) # shape: (3,)
                    running_std = np.std(generated_array, axis=0) # shape: (3,)
                    # Dynamic sigma: ensure a minimum sigma value.
                    dynamic_sigma = np.maximum(running_std, np.array(SPIKE_SIG))

                    # --- Compute Repetition Penalty ---
                    # For each candidate, count the number of occurrences for each element along the corresponding column.
                    rep_counts = np.array([
                        [np.sum(generated_array[:, k] == candidate_array[i, k]) for k in range(3)]
                        for i in range(candidate_array.shape[0])
                    ]) # shape: (n_candidates, 3)
                    # The repetition penalty in log-space.
                    rep_penalty_term = np.sum(rep_counts * np.log(np.array(REP_PENALTY)) *
                                              (current_length / sequence_length), axis=1) # shape: (n_candidates,)

                    # --- Compute Spike (Variance) Penalty ---
                    # Compute the difference per candidate from the running average.
                    diff = candidate_array - running_avg # shape: (n_candidates, 3)
                    spike_penalty_term = np.sum(np.array(SPIKE_STRENGTH) * (diff**2) / (2 * (dynamic_sigma**2)),
                                                axis=1) # shape: (n_candidates,)

                    # --- Compute Candidate Log-Scores ---
                    # Use np.log on frequencies (they are positive by construction).
                    log_freq = np.log(filtered_freqs)
                    log_scores = log_freq - rep_penalty_term - spike_penalty_term

                    # --- Softmax in Log-space (stable computation) ---
                    max_log = np.max(log_scores)
                    exp_scores = np.exp(log_scores - max_log)
                    probabilities = exp_scores / np.sum(exp_scores)

                    # Choose the next candidate by sampling from the computed probabilities.
                    chosen_idx = np.random.choice(len(filtered_candidates), p=probabilities)
                    next_triplet = filtered_candidates[chosen_idx]
                    candidate_found = True

            if not candidate_found:
                # Abort this generation attempt if no valid candidate is available.
                valid_generation = False
                break

            generated_sequence.append(next_triplet)

        # Ensure the final sequence meets the invariant and is unique.
        if valid_generation and len(generated_sequence) == sequence_length:
            new_sequence = tuple(generated_sequence)
            invariant_ok = all(a[1] <= b[0] for a, b in zip(new_sequence, new_sequence[1:]))
            if invariant_ok and new_sequence not in training_set:
                return new_sequence

    return None

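# Illustrative generation sketch (toy data assumed). The toy triplets are built
# so that each triplet's second element never exceeds the next triplet's first
# element, which is the invariant the generator preserves. The call may return
# None if no novel sequence is found within max_attempts.

if __name__ == '__main__':

    rng = np.random.default_rng(42)

    starts = np.cumsum(rng.integers(1, 4, size=(8, 24)), axis=1)
    data = np.stack([starts, starts + 1, rng.integers(1, 6, size=(8, 24))], axis=2)

    lookup = precompute_mapping_lookup(build_ngram_mapping(data, memory_len=2))
    train_set = build_training_sequences_set(data)

    seq = generate_sequence_optimized(lookup, train_set, memory_len=2, sequence_length=12)
    print(seq if seq is not None else 'No novel sequence found')
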
###################################################################################

def analyze_generated_sequence(sequence: tuple, mapping_lookup: dict, memory_len: int) -> tuple:
    """
    Analyze the generated sequence and return several useful statistics
    as both a dictionary and as a nicely formatted string report.

    Statistics Computed:
      - unigram_diversity: Ratio of unique triplets to total triplets.
      - repetition_rate: Fraction of repeated triplets.
      - bigram_diversity: Ratio of unique consecutive pairs to total pairs.
      - max_consecutive_repetitions: Maximum number of identical consecutive triplets.
      - avg_candidate_probability (overfit rate): For the transitions (using a sliding window of size
        MEMORY_LEN as context followed by candidate), the average probability of the chosen candidate
        as per the training mapping.

    Additional Analytics:
      - element_stats: For each element (index 0, 1, 2) in a triplet, includes:
          * mean, standard deviation, minimum, maximum, and average consecutive absolute difference.
      - avg_transition_entropy: The average entropy of the candidate distributions (from mapping_lookup)
        for each transition context.
      - context_coverage: The fraction of transitions (based on context of length MEMORY_LEN) that are found
        in the mapping_lookup.

    Parameters:
        sequence: Generated sequence (tuple of triplets).
        mapping_lookup: Precomputed mapping lookup.
        memory_len: The context length used.

    Returns:
        A tuple containing:
            (stats_dict, stats_report_string)
    """
    stats = {}
    seq_len = len(sequence)

    # --- Basic Statistics ---

    # Unigram.
    unique_triplets = len(set(sequence))
    stats["unigram_diversity"] = unique_triplets / seq_len
    stats["repetition_rate"] = 1 - (unique_triplets / seq_len)

    # Bigram.
    bigrams = [(sequence[i], sequence[i+1]) for i in range(seq_len - 1)]
    unique_bigrams = len(set(bigrams))
    stats["bigram_diversity"] = unique_bigrams / (seq_len - 1)

    # Maximum consecutive repetitions.
    max_consecutive = 1
    current_consecutive = 1
    for i in range(1, seq_len):
        if sequence[i] == sequence[i-1]:
            current_consecutive += 1
            if current_consecutive > max_consecutive:
                max_consecutive = current_consecutive
        else:
            current_consecutive = 1
    stats["max_consecutive_repetitions"] = max_consecutive

    # Avg Candidate Probability (Overfit Rate)
    overfit_probs = []
    for i in range(memory_len, seq_len):
        context = tuple(sequence[i - memory_len: i])
        candidate = sequence[i]
        if context in mapping_lookup:
            candidates, frequencies = mapping_lookup[context]
            total_freq = np.sum(frequencies)
            try:
                idx = candidates.index(candidate)
                cand_prob = frequencies[idx] / total_freq
                overfit_probs.append(cand_prob)
            except ValueError:
                pass
    stats["avg_candidate_probability"] = np.mean(overfit_probs) if overfit_probs else None

    # --- Additional Analytics ---

    # 1. Element-Level Statistics.
    seq_arr = np.array(sequence) # shape: (seq_len, 3)
    element_stats = {}
    for dim in range(seq_arr.shape[1]):
        values = seq_arr[:, dim]
        mean_val = np.mean(values)
        std_val = np.std(values)
        min_val = np.min(values)
        max_val = np.max(values)
        # Calculate average absolute difference between consecutive values:
        diffs = np.abs(np.diff(values))
        avg_diff = np.mean(diffs) if diffs.size > 0 else 0
        element_stats[f"element_{dim}"] = {
            "mean": mean_val,
            "std": std_val,
            "min": min_val,
            "max": max_val,
            "avg_consecutive_diff": avg_diff,
        }
    stats["element_stats"] = element_stats

    # 2. Transition Entropy:
    entropies = []
    valid_transitions = 0
    for i in range(memory_len, seq_len):
        context = tuple(sequence[i - memory_len: i])
        if context in mapping_lookup:
            candidates, freqs = mapping_lookup[context]
            total_freq = np.sum(freqs)
            if total_freq > 0:
                probs = freqs / total_freq
                # Add a very small constant to avoid log(0)
                epsilon = 1e-10
                entropy = -np.sum(probs * np.log(probs + epsilon))
                entropies.append(entropy)
            valid_transitions += 1
    stats["avg_transition_entropy"] = np.mean(entropies) if entropies else None

    # 3. Context Coverage:
    total_transitions = seq_len - memory_len
    stats["context_coverage"] = (valid_transitions / total_transitions) if total_transitions > 0 else None

    # --- Build a Pretty Report String ---
    sep_line = "-" * 60
    lines = []
    lines.append(sep_line)
    lines.append("Sequence Analytics Report:")
    lines.append(sep_line)
    lines.append("Overall Statistics:")
    lines.append(f"  Unigram Diversity          : {stats['unigram_diversity']:.3f}")
    lines.append(f"  Repetition Rate            : {stats['repetition_rate']:.3f}")
    lines.append(f"  Bigram Diversity           : {stats['bigram_diversity']:.3f}")
    lines.append(f"  Max Consecutive Repetitions: {stats['max_consecutive_repetitions']}")
    cand_prob = stats["avg_candidate_probability"]
    cand_prob_str = f"{cand_prob:.3f}" if cand_prob is not None else "N/A"
    lines.append(f"  Avg Candidate Probability  : {cand_prob_str}")
    lines.append("")

    lines.append("Element-Level Statistics:")
    for dim in sorted(element_stats.keys()):
        ed = element_stats[dim]
        lines.append(f"  {dim.capitalize()}:")
        lines.append(f"    Mean                 : {ed['mean']:.3f}")
        lines.append(f"    Std Dev              : {ed['std']:.3f}")
        lines.append(f"    Min                  : {ed['min']:.3f}")
        lines.append(f"    Max                  : {ed['max']:.3f}")
        lines.append(f"    Avg Consecutive Diff : {ed['avg_consecutive_diff']:.3f}")
    lines.append("")

    lines.append("Transition Statistics:")
    avg_entropy = stats["avg_transition_entropy"]
    entropy_str = f"{avg_entropy:.3f}" if avg_entropy is not None else "N/A"
    lines.append(f"  Average Transition Entropy: {entropy_str}")
    cc = stats["context_coverage"]
    cc_str = f"{cc:.3f}" if cc is not None else "N/A"
    lines.append(f"  Context Coverage          : {cc_str}")
    lines.append(sep_line)

    stats_report = "\n".join(lines)

    # Return both the dictionary and the formatted report string.
    return stats, stats_report

###################################################################################

def autoregressive_generate(start_seq, mel_tones, trg_array, trg_matches_array, num_new_tokens, chunk_len=5):

    # Convert sequences to NumPy arrays.
    current_seq = np.array(start_seq, dtype=int) # Shape: (num_tokens, token_dim)
    trg_array = np.array(trg_array, dtype=int) # Shape: (num_candidates, 2, token_dim)
    start_len = len(start_seq)

    midx = start_len-1

    # Deque for sliding memory of candidate pairs (immutable tuples).
    recent_candidates = deque(maxlen=5)

    while (len(current_seq) - start_len) < num_new_tokens:

        midx += 1

        # Get the last (chunk_len - 1) tokens as context.
        context = current_seq[-(chunk_len-1):] # Shape: (chunk_len - 1, token_dim)

        sli = 0
        msize = 0

        ctx = context[:, :-1].reshape(1, -1)
        trg_mat_arr = trg_matches_array

        while msize < 8:

            print('=== Slice', sli)

            # Compare context with candidates in trg_array.
            match_mask = np.all(ctx == trg_mat_arr, axis=1)
            match_indices = np.where(match_mask)[0]

            msize = match_indices.size

            if msize < 8:
                sli += 1
                ctx = context[:, :-1].reshape(1, -1)[:, sli:]
                trg_mat_arr = trg_matches_array[:, :-sli]

        if match_indices.size == 0:
            if len(current_seq) > start_len:

                #tones_chord = sorted([mel_tones[midx], (mel_tones[midx]+7) % 12])
                tones_chord = sorted([mel_tones[midx]])
                new_tuple = [[mel_tones[midx], TMIDIX.ALL_CHORDS_SORTED.index(tones_chord)]]
                current_seq = np.concatenate((current_seq, new_tuple), axis=0)
                print('Subbed', midx)
                continue

        # From the matching candidates, filter out those whose candidate pair is in recent memory.
        available_candidates = []
        cseen = []
        for idx in match_indices:

            if idx not in recent_candidates:
                # Convert candidate pair to an immutable tuple
                candidate_pair = tuple(trg_array[idx].tolist())
                if candidate_pair[-1][0] == mel_tones[midx] and candidate_pair[-1][1] not in cseen:
                    available_candidates.append((idx, candidate_pair))
                    cseen.append(candidate_pair[-1][1])

        # If too few fresh candidates remain, substitute a simple chord for the current melody tone.
        if len(available_candidates) < 3:
            if len(current_seq) >= start_len:
                #tones_chord = sorted([mel_tones[midx], (mel_tones[midx]+7) % 12])
                tones_chord = sorted([mel_tones[midx]])
                new_tuple = [[mel_tones[midx], TMIDIX.ALL_CHORDS_SORTED.index(tones_chord)]]
                current_seq = np.concatenate((current_seq, new_tuple), axis=0)
                #rev_val = random.choice([-1, -2])
                #current_seq = current_seq[:rev_val]
                #print(midx)
                #midx = len(current_seq)
                #print('Reverted', midx, len(current_seq))
                continue

        else:
            print(len(available_candidates))
            # Choose one available candidate at random.
            chosen_idx, chosen_pair = available_candidates[np.random.choice(len(available_candidates))]
            new_token = trg_array[chosen_idx][-1] # The second token of the candidate pair.

            # Append the new token to the sequence.
            current_seq = np.concatenate((current_seq, new_token[None, :]), axis=0)

            recent_candidates.append(chosen_idx)

        print('Gen seq len', len(current_seq))

    return current_seq

###################################################################################
# This is the end of the TCUPY Python module
###################################################################################
TMIDIX.py
CHANGED
|
@@ -8,7 +8,7 @@ r'''############################################################################
|
|
| 8 |
# Tegridy MIDI X Module (TMIDI X / tee-midi eks)
|
| 9 |
# Version 1.0
|
| 10 |
#
|
| 11 |
-
# NOTE: TMIDI X Module starts after the partial MIDI.py module @ line
|
| 12 |
#
|
| 13 |
# Based upon MIDI.py module v.6.7. by Peter Billam / pjb.com.au
|
| 14 |
#
|
|
@@ -21,19 +21,19 @@ r'''############################################################################
|
|
| 21 |
#
|
| 22 |
###################################################################################
|
| 23 |
###################################################################################
|
| 24 |
-
#
|
| 25 |
#
|
| 26 |
-
#
|
| 27 |
-
#
|
| 28 |
-
#
|
| 29 |
#
|
| 30 |
-
#
|
| 31 |
#
|
| 32 |
-
#
|
| 33 |
-
#
|
| 34 |
-
#
|
| 35 |
-
#
|
| 36 |
-
#
|
| 37 |
###################################################################################
|
| 38 |
###################################################################################
|
| 39 |
#
|
|
@@ -1446,8 +1446,9 @@ def _encode(events_lol, unknown_callback=None, never_add_eot=False,
|
|
| 1446 |
# pjb.com.au
|
| 1447 |
#
|
| 1448 |
# Project Los Angeles
|
| 1449 |
-
# Tegridy Code
|
| 1450 |
-
#
|
|
|
|
| 1451 |
#
|
| 1452 |
###################################################################################
|
| 1453 |
###################################################################################
|
|
@@ -1471,6 +1472,8 @@ import csv
|
|
| 1471 |
|
| 1472 |
import tqdm
|
| 1473 |
|
|
|
|
|
|
|
| 1474 |
from itertools import zip_longest
|
| 1475 |
from itertools import groupby
|
| 1476 |
from collections import Counter
|
|
@@ -1488,6 +1491,10 @@ import math
|
|
| 1488 |
|
| 1489 |
import matplotlib.pyplot as plt
|
| 1490 |
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1491 |
###################################################################################
|
| 1492 |
#
|
| 1493 |
# Original TMIDI Tegridy helper functions
|
|
@@ -5589,7 +5596,7 @@ def split_melody(enhanced_melody_score_notes,
|
|
| 5589 |
for e in enhanced_melody_score_notes:
|
| 5590 |
dtime = max(0, min(max_score_time, e[1]-pe[1]))
|
| 5591 |
|
| 5592 |
-
if dtime >
|
| 5593 |
if chu:
|
| 5594 |
mel_chunks.append(chu)
|
| 5595 |
chu = []
|
|
@@ -9603,7 +9610,7 @@ def escore_notes_to_text_description(escore_notes,
|
|
| 9603 |
|
| 9604 |
all_patches = [e[6] for e in escore_notes]
|
| 9605 |
|
| 9606 |
-
patches = ordered_set(all_patches)
|
| 9607 |
|
| 9608 |
instruments = [alpha_str(Number2patch[p]) for p in patches if p < 128]
|
| 9609 |
|
|
@@ -11089,7 +11096,273 @@ def escore_notes_pitches_range(escore_notes,
|
|
| 11089 |
return [ -1] * 6
|
| 11090 |
|
| 11091 |
###################################################################################
|
| 11092 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 11093 |
# This is the end of the TMIDI X Python module
|
| 11094 |
-
#
|
| 11095 |
###################################################################################
|
|
|
|
| 8 |
# Tegridy MIDI X Module (TMIDI X / tee-midi eks)
|
| 9 |
# Version 1.0
|
| 10 |
#
|
| 11 |
+
# NOTE: TMIDI X Module starts after the partial MIDI.py module @ line 1438
|
| 12 |
#
|
| 13 |
# Based upon MIDI.py module v.6.7. by Peter Billam / pjb.com.au
|
| 14 |
#
|
|
|
|
| 21 |
#
|
| 22 |
###################################################################################
|
| 23 |
###################################################################################
|
| 24 |
+
# Copyright 2025 Project Los Angeles / Tegridy Code
|
| 25 |
#
|
| 26 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 27 |
+
# you may not use this file except in compliance with the License.
|
| 28 |
+
# You may obtain a copy of the License at
|
| 29 |
#
|
| 30 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 31 |
#
|
| 32 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 33 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 34 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 35 |
+
# See the License for the specific language governing permissions and
|
| 36 |
+
# limitations under the License.
|
| 37 |
###################################################################################
|
| 38 |
###################################################################################
|
| 39 |
#
|
|
|
|
1446     # pjb.com.au
1447     #
1448     # Project Los Angeles
1449   + # Tegridy Code 2025
1450   + #
1451   + # https://github.com/Tegridy-Code/Project-Los-Angeles
1452     #
1453     ###################################################################################
1454     ###################################################################################
1472
1473     import tqdm
1474
1475   + import multiprocessing
1476   +
1477     from itertools import zip_longest
1478     from itertools import groupby
1479     from collections import Counter
1491
1492     import matplotlib.pyplot as plt
1493
1494   + import psutil
1495   +
1496   + from collections import defaultdict
1497   +
1498     ###################################################################################
1499     #
1500     # Original TMIDI Tegridy helper functions
5596         for e in enhanced_melody_score_notes:
5597             dtime = max(0, min(max_score_time, e[1]-pe[1]))
5598
5599   +         if dtime > stime:
5600                 if chu:
5601                     mel_chunks.append(chu)
5602                 chu = []
9610
9611         all_patches = [e[6] for e in escore_notes]
9612
9613   +     patches = ordered_set(all_patches)[:16]
9614
9615         instruments = [alpha_str(Number2patch[p]) for p in patches if p < 128]
9616
11096        return [ -1] * 6
11097
11098    ###################################################################################
11099  +
11100  + def escore_notes_core(escore_notes, core_len=128):
11101  +
11102  +     cscore = chordify_score([1000, escore_notes])
11103  +
11104  +     chords = []
11105  +     chords_idxs = []
11106  +
11107  +     for i, c in enumerate(cscore):
11108  +
11109  +         pitches = [e[4] for e in c if e[3] != 9]
11110  +
11111  +         if pitches:
11112  +             tones_chord = sorted(set([p % 12 for p in pitches]))
11113  +
11114  +             if tones_chord not in ALL_CHORDS_SORTED:
11115  +                 tones_chord = check_and_fix_tones_chord(tones_chord)
11116  +
11117  +             chords.append(ALL_CHORDS_SORTED.index(tones_chord))
11118  +             chords_idxs.append(i)
11119  +
11120  +     mid = len(chords_idxs) // 2
11121  +     clen = core_len // 2
11122  +
11123  +     sidx = chords_idxs[mid-clen]
11124  +     eidx = chords_idxs[mid+clen]
11125  +
11126  +     core_chords = chords[mid-clen:mid+clen]
11127  +     core_score = flatten(cscore[sidx:eidx])
11128  +
11129  +     return core_score, core_chords
11130  +
11131  + ###################################################################################
11132  +
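escore_notes_core keeps the core_len chords centered on the middle of the piece. A minimal stand-alone sketch of just that centered-slice step, with stand-in data rather than a real escore (chordify_score, ALL_CHORDS_SORTED and flatten are the TMIDIX helpers assumed above):

    chords = list(range(100))       # stand-in for per-chord tone-chord indices
    chords_idxs = list(range(100))  # stand-in for chord positions in the chordified score

    core_len = 16
    mid = len(chords_idxs) // 2     # 50
    clen = core_len // 2            # 8

    core_chords = chords[mid-clen:mid+clen]
    print(core_chords)              # the 16 middle chords: [42, 43, ..., 57]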
11133  + def multiprocessing_wrapper(function, data_list):
11134  +
11135  +     with multiprocessing.Pool() as pool:
11136  +
11137  +         results = []
11138  +
11139  +         for result in tqdm.tqdm(pool.imap_unordered(function, data_list), total=len(data_list)):
11140  +             results.append(result)
11141  +
11142  +     return results
11143  +
11144  + ###################################################################################
11145  +
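A usage sketch for the wrapper above, assuming the module is importable as TMIDIX. The worker must be a top-level, picklable function, the call needs a __main__ guard on platforms that spawn worker processes, and imap_unordered returns results in completion order, not input order:

    import TMIDIX

    def square(x):  # hypothetical worker function
        return x * x

    if __name__ == '__main__':
        results = TMIDIX.multiprocessing_wrapper(square, list(range(1000)))
        print(len(results))  # 1000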
11146  + def rle_encode_ones(matrix, div_mod=-1):
11147  +
11148  +     flat_list = [val for row in matrix for val in row]
11149  +
11150  +     encoding = []
11151  +     i = 0
11152  +
11153  +     while i < len(flat_list):
11154  +
11155  +         if flat_list[i] == 1:
11156  +
11157  +             start_index = i
11158  +             count = 1
11159  +             i += 1
11160  +
11161  +             while i < len(flat_list) and flat_list[i] == 1:
11162  +                 count += 1
11163  +                 i += 1
11164  +
11165  +             if div_mod > 0:
11166  +                 encoding.append((start_index // div_mod, start_index % div_mod))
11167  +
11168  +             else:
11169  +                 encoding.append(start_index)
11170  +
11171  +         else:
11172  +             i += 1
11173  +
11174  +     return encoding
11175  +
11176  + ###################################################################################
11177  +
11178  + def rle_decode_ones(encoding, size=(128, 128)):
11179  +
11180  +     flat_list = [0] * (size[0] * size[1])
11181  +
11182  +     for start_index in encoding:
11183  +         flat_list[start_index] = 1
11184  +
11185  +     matrix = [flat_list[i * size[1]:(i + 1) * size[1]] for i in range(size[0])]
11186  +
11187  +     return matrix
11188  +
11189  + ###################################################################################
11190  +
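Note that with the default div_mod=-1 the encoder stores only the start index of each run of ones (count is computed but never emitted), so decoding restores just the run-start cells. A small roundtrip, assuming the two functions above are in scope:

    matrix = [[0, 1, 1, 0],
              [1, 0, 0, 1]]

    # flat layout 0..7: runs of ones start at indices 1, 4 and 7
    encoding = rle_encode_ones(matrix)             # -> [1, 4, 7]

    restored = rle_decode_ones(encoding, size=(2, 4))
    print(restored)  # [[0, 1, 0, 0], [1, 0, 0, 1]] -- only the first cell of each run survives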
11191  + def vertical_list_search(list_of_lists, trg_list):
11192  +
11193  +     src_list = list_of_lists
11194  +
11195  +     if not src_list or not trg_list:
11196  +         return []
11197  +
11198  +     num_rows = len(src_list)
11199  +     k = len(trg_list)
11200  +
11201  +     row_sets = [set(row) for row in src_list]
11202  +
11203  +     results = []
11204  +
11205  +     for start in range(num_rows - k + 1):
11206  +         valid = True
11207  +
11208  +         for offset, target in enumerate(trg_list):
11209  +
11210  +             if target not in row_sets[start + offset]:
11211  +                 valid = False
11212  +                 break
11213  +
11214  +         if valid:
11215  +             results.append(list(range(start, start + k)))
11216  +
11217  +     return results
11218  +
11219  + ###################################################################################
11220  +
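In other words, the function returns every window of consecutive rows in which row k of the window contains trg_list[k]. A quick check:

    src = [[1, 2, 3],
           [4, 5],
           [6, 2],
           [5, 7]]

    print(vertical_list_search(src, [2, 5]))   # -> [[0, 1], [2, 3]]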
11221  + def smooth_values(values, window_size=3):
11222  +
11223  +     smoothed = []
11224  +
11225  +     for i in range(len(values)):
11226  +
11227  +         start = max(0, i - window_size // 2)
11228  +         end = min(len(values), i + window_size // 2 + 1)
11229  +
11230  +         window = values[start:end]
11231  +
11232  +         smoothed.append(int(sum(window) / len(window)))
11233  +
11234  +     return smoothed
11235  +
11236  + ###################################################################################
11237  +
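This is a plain moving average truncated to int, with the window shrinking at both ends of the list. For example:

    print(smooth_values([0, 10, 0, 10, 0, 10], window_size=3))
    # -> [5, 3, 6, 3, 6, 5]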
11238  + def is_mostly_wide_peaks_and_valleys(values,
11239  +                                      min_range=32,
11240  +                                      threshold=0.7,
11241  +                                      smoothing_window=5
11242  +                                      ):
11243  +
11244  +     if not values:
11245  +         return False
11246  +
11247  +     smoothed_values = smooth_values(values, smoothing_window)
11248  +
11249  +     value_range = max(smoothed_values) - min(smoothed_values)
11250  +
11251  +     if value_range < min_range:
11252  +         return False
11253  +
11254  +     if all(v == smoothed_values[0] for v in smoothed_values):
11255  +         return False
11256  +
11257  +     trend_types = []
11258  +
11259  +     for i in range(1, len(smoothed_values)):
11260  +         if smoothed_values[i] > smoothed_values[i - 1]:
11261  +             trend_types.append(1)
11262  +
11263  +         elif smoothed_values[i] < smoothed_values[i - 1]:
11264  +             trend_types.append(-1)
11265  +
11266  +         else:
11267  +             trend_types.append(0)
11268  +
11269  +     trend_count = trend_types.count(1) + trend_types.count(-1)
11270  +
11271  +     proportion = trend_count / len(trend_types)
11272  +
11273  +     return proportion >= threshold
11274  +
11275  + ###################################################################################
11276  +
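So a contour passes if, after smoothing, it spans at least min_range and at least threshold of its steps are strictly rising or falling. A synthetic check built on the function above:

    up = list(range(0, 64, 4))
    down = list(range(64, 0, -4))
    wave = (up + down) * 4      # wide triangle wave
    flat = [60, 60, 61] * 40    # narrow, nearly constant

    print(is_mostly_wide_peaks_and_valleys(wave))   # True
    print(is_mostly_wide_peaks_and_valleys(flat))   # False: smoothed range < min_range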
11277  + def system_memory_utilization(return_dict=False):
11278  +
11279  +     if return_dict:
11280  +         return dict(psutil.virtual_memory()._asdict())
11281  +
11282  +     else:
11283  +         print('RAM memory % used:', psutil.virtual_memory()[2])
11284  +         print('RAM Used (GB):', psutil.virtual_memory()[3]/(1024**3))
11285  +
11286  + ###################################################################################
11287  +
11288  + def create_files_list(datasets_paths=['./'],
11289  +                       files_exts=['.mid', '.midi', '.kar', '.MID', '.MIDI', '.KAR'],
11290  +                       randomize_files_list=True,
11291  +                       verbose=True
11292  +                       ):
11293  +     if verbose:
11294  +         print('=' * 70)
11295  +         print('Searching for files...')
11296  +         print('This may take a while on a large dataset in particular...')
11297  +         print('=' * 70)
11298  +
11299  +     filez_set = defaultdict(None)
11300  +
11301  +     files_exts = tuple(files_exts)
11302  +
11303  +     for dataset_addr in tqdm.tqdm(datasets_paths):
11304  +         for dirpath, dirnames, filenames in os.walk(dataset_addr):
11305  +             for file in filenames:
11306  +                 if file not in filez_set and file.endswith(files_exts):
11307  +                     filez_set[os.path.join(dirpath, file)] = None
11308  +
11309  +     filez = list(filez_set.keys())
11310  +
11311  +     if verbose:
11312  +         print('Done!')
11313  +         print('=' * 70)
11314  +
11315  +     if filez:
11316  +         if randomize_files_list:
11317  +
11318  +             if verbose:
11319  +                 print('Randomizing file list...')
11320  +
11321  +             random.shuffle(filez)
11322  +
11323  +             if verbose:
11324  +                 print('Done!')
11325  +                 print('=' * 70)
11326  +
11327  +         if verbose:
11328  +             print('Found', len(filez), 'files.')
11329  +             print('=' * 70)
11330  +
11331  +     else:
11332  +         if verbose:
11333  +             print('Could not find any files...')
11334  +             print('Please check dataset dirs and files extensions...')
11335  +             print('=' * 70)
11336  +
11337  +     return filez
11338  +
11339  + ###################################################################################
11340  +
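A usage sketch with a hypothetical dataset directory. (Note that the `file not in filez_set` test compares bare file names against keys that are full paths, so it is effectively always true; de-duplication actually happens through the uniqueness of the dict keys.)

    midi_files = create_files_list(datasets_paths=['./MIDI-Dataset/'],   # hypothetical path
                                   randomize_files_list=False)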
11341  + def has_consecutive_trend(nums, count):
11342  +
11343  +     if len(nums) < count:
11344  +         return False
11345  +
11346  +     increasing_streak = 1
11347  +     decreasing_streak = 1
11348  +
11349  +     for i in range(1, len(nums)):
11350  +         if nums[i] > nums[i - 1]:
11351  +             increasing_streak += 1
11352  +             decreasing_streak = 1
11353  +
11354  +         elif nums[i] < nums[i - 1]:
11355  +             decreasing_streak += 1
11356  +             increasing_streak = 1
11357  +
11358  +         else:
11359  +             increasing_streak = decreasing_streak = 1
11360  +
11361  +         if increasing_streak == count or decreasing_streak == count:
11362  +             return True
11363  +
11364  +     return False
11365  +
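The streaks count elements rather than steps, so count=4 means a run of four strictly monotone values:

    print(has_consecutive_trend([1, 2, 3, 4, 2, 1], 4))   # True: 1,2,3,4
    print(has_consecutive_trend([5, 5, 5, 5], 3))         # False: ties reset both streaks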
11366  + ###################################################################################
11367    # This is the end of the TMIDI X Python module
11368    ###################################################################################
TPLOTS.py
CHANGED
@@ -4,12 +4,12 @@ r'''############################################################################

4        ################################################################################
5        #
6        #
7      - #
8      - #
9        #
10     - #
11       #
12     - #
13       #
14       # https://github.com/asigalov61/tegridy-tools
15       #
@@ -33,20 +33,20 @@ r'''############################################################################

33       ################################################################################
34       ################################################################################
35       #
36     - #
37       #
38     - #
39     - #
40     - #
41     - #
42     - #
43       #
44       ################################################################################
45       #
46     - #
47       #
48     - #
49     - #
50       #
51       ################################################################################
52       '''
@@ -1254,6 +1254,113 @@ def plot_parsons_code(parsons_code,

1254
1255         plt.close()
1256
1257     ################################################################################
1258     # [WIP] Future dev functions
1259     ################################################################################
@@ -1363,7 +1470,53 @@ plt.show()

1363     '''
1364
1365     ################################################################################
1366   -
1367     # This is the end of TPLOTS Python modules
1368   - #
1369     ################################################################################
4        ################################################################################
5        #
6        #
7      + # Tegridy Plots Python Module (TPLOTS)
8      + # Version 1.0
9        #
10     + # Project Los Angeles
11       #
12     + # Tegridy Code 2025
13       #
14       # https://github.com/asigalov61/tegridy-tools
15       #
33       ################################################################################
34       ################################################################################
35       #
36     + # Critical dependencies
37       #
38     + # !pip install numpy==1.24.4
39     + # !pip install scipy
40     + # !pip install matplotlib
41     + # !pip install networkx
42     + # !pip3 install scikit-learn
43       #
44       ################################################################################
45       #
46     + # Future critical dependencies
47       #
48     + # !pip install umap-learn
49     + # !pip install alphashape
50       #
51       ################################################################################
52       '''
1254
1255         plt.close()
1256
1257   + ################################################################################
1258   +
1259   + def plot_tokens_embeddings_constellation(tokens_embeddings,
1260   +                                          start_token,
1261   +                                          end_token,
1262   +                                          plot_size=(10, 10),
1263   +                                          labels_size=12,
1264   +                                          show_grid=False,
1265   +                                          save_plot=''):
1266   +
1267   +     """
1268   +     Plots token embeddings constellation using MST and graph layout
1269   +     without dimensionality reduction.
1270   +     """
1271   +
1272   +     distance_matrix = metrics.pairwise_distances(tokens_embeddings[start_token:end_token], metric='cosine')
1273   +
1274   +     token_labels = [str(i) for i in range(start_token, end_token)]
1275   +
1276   +     mst = minimum_spanning_tree(distance_matrix).toarray()
1277   +
1278   +     n = distance_matrix.shape[0]
1279   +     G = nx.Graph()
1280   +
1281   +     for i in range(n):
1282   +         for j in range(n):
1283   +             if mst[i, j] > 0:
1284   +                 weight = 1 / (distance_matrix[i, j] + 1e-8)
1285   +                 G.add_edge(i, j, weight=weight)
1286   +
1287   +     pos = nx.kamada_kawai_layout(G, weight='weight')
1288   +
1289   +     points = np.array([pos[i] for i in range(n)])
1290   +
1291   +     plt.figure(figsize=plot_size)
1292   +     plt.scatter(points[:, 0], points[:, 1], color='blue')
1293   +
1294   +     for i, label in enumerate(token_labels):
1295   +         plt.annotate(label, (points[i, 0], points[i, 1]),
1296   +                      textcoords="offset points",
1297   +                      xytext=(0, 10),
1298   +                      ha='center',
1299   +                      fontsize=labels_size)
1300   +
1301   +     for i in range(n):
1302   +         for j in range(n):
1303   +             if mst[i, j] > 0:
1304   +                 plt.plot([points[i, 0], points[j, 0]],
1305   +                          [points[i, 1], points[j, 1]],
1306   +                          'k--', alpha=0.5)
1307   +
1308   +     plt.title('Token Embeddings Constellation with MST', fontsize=labels_size)
1309   +     plt.grid(show_grid)
1310   +
1311   +     if save_plot:
1312   +         plt.savefig(save_plot, bbox_inches="tight")
1313   +         plt.close()
1314   +
1315   +     else:
1316   +         plt.show()
1317   +
1318   +         plt.close()
1319   +
1320   + ################################################################################
1321   +
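A hypothetical invocation, assuming tokens_embeddings is a (num_tokens, embedding_dim) NumPy array and the module's own imports (numpy as np, sklearn metrics, scipy's minimum_spanning_tree, networkx as nx) are in place:

    import numpy as np

    emb = np.random.rand(256, 512)   # stand-in token embedding matrix
    plot_tokens_embeddings_constellation(emb, start_token=0, end_token=64)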
1322   + def find_token_path(tokens_embeddings,
1323   +                     start_token,
1324   +                     end_token,
1325   +                     verbose=False
1326   +                     ):
1327   +
1328   +     """
1329   +     Finds the path of tokens between start_token and end_token using
1330   +     the Minimum Spanning Tree (MST) derived from the distance matrix.
1331   +     """
1332   +
1333   +     distance_matrix = metrics.pairwise_distances(tokens_embeddings, metric='cosine')
1334   +
1335   +     token_labels = [str(i) for i in range(len(distance_matrix))]
1336   +
1337   +     if verbose:
1338   +         print('Total number of tokens:', len(distance_matrix))
1339   +
1340   +     mst = minimum_spanning_tree(distance_matrix).toarray()
1341   +
1342   +     n = distance_matrix.shape[0]
1343   +     G = nx.Graph()
1344   +
1345   +     for i in range(n):
1346   +         for j in range(n):
1347   +             if mst[i, j] > 0:
1348   +                 weight = 1 / (distance_matrix[i, j] + 1e-8)
1349   +                 G.add_edge(i, j, weight=weight)
1350   +
1351   +     try:
1352   +         start_idx = token_labels.index(str(start_token))
1353   +         end_idx = token_labels.index(str(end_token))
1354   +
1355   +     except ValueError:
1356   +         raise ValueError("Start or end token not found in the provided token labels.")
1357   +
1358   +     path_indices = nx.shortest_path(G, source=start_idx, target=end_idx)
1359   +
1360   +     token_path = [int(token_labels[idx]) for idx in path_indices]
1361   +
1362   +     return token_path
1363   +
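Continuing the sketch above, a hypothetical call (the exact hops depend on the embeddings):

    path = find_token_path(emb, start_token=3, end_token=40, verbose=True)
    print(path)   # e.g. [3, 17, 40]: the tokens along the MST between the endpoints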
1364     ################################################################################
1365     # [WIP] Future dev functions
1366     ################################################################################
1470     '''
1471
1472     ################################################################################
1473   +
1474   + def plot_tree_horizontal(data):
1475   +
1476   +     """
1477   +     Given data as a list of levels (each level is a tuple or list of
1478   +     displacements for each branch), this function computes the cumulative
1479   +     value per branch (starting from 0) and plots each branch
1480   +     with the tree level mapped to the x-axis and the cumulative value mapped
1481   +     to the y-axis. This gives a left-to-right tree with branches spanning up
1482   +     (positive) and down (negative).
1483   +
1484   +     Parameters:
1485   +         data (list of tuple/list): Each element represents a tree level.
1486   +             It is assumed every level has the same length.
1487   +     """
1488   +
1489   +     # Convert data to a NumPy array with shape (n_levels, n_branches)
1490   +     data = np.array(data)
1491   +     n_levels, n_branches = data.shape
1492   +
1493   +     # Compute cumulative sums along each branch.
1494   +     # Each branch starts at 0 at level 0.
1495   +     cum = np.zeros((n_levels + 1, n_branches))
1496   +     for i in range(n_levels):
1497   +         cum[i + 1, :] = cum[i, :] + data[i, :]
1498   +
1499   +     plt.figure(figsize=(12, 8))
1500   +
1501   +     # Plot each branch as a line. For branch j:
1502   +     #   - x coordinates are the tree levels (0 to n_levels)
1503   +     #   - y coordinates are the corresponding cumulative values.
1504   +     for j in range(n_branches):
1505   +         x = np.arange(n_levels + 1)
1506   +         y = cum[:, j]
1507   +         plt.plot(x, y, marker='o', label=f'Branch {j}')
1508   +
1509   +     plt.title("Horizontal Tree Visualization: Branches Spanning Up and Down", fontsize=14)
1510   +     plt.xlabel("Tree Level (Left = Root)")
1511   +     plt.ylabel("Cumulative Value (Up = Positive, Down = Negative)")
1512   +
1513   +     # Add a horizontal line at y=0 to emphasize the center.
1514   +     plt.axhline(0, color="gray", linestyle="--")
1515   +
1516   +     #plt.legend(bbox_to_anchor=(1.05, 1), loc='upper left')
1517   +     plt.tight_layout()
1518   +     plt.show()
1519   +
1520   + ################################################################################
1521     # This is the end of TPLOTS Python modules
1522     ################################################################################
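A hypothetical call to plot_tree_horizontal with three levels of three branches each:

    levels = [(1, -1, 2),
              (2, -2, 1),
              (-1, 1, -3)]
    plot_tree_horizontal(levels)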
monster_search_and_filter.py
ADDED
@@ -0,0 +1,2002 @@
1    + #! /usr/bin/python3
2    +
3    + r'''###############################################################################
4    + ###################################################################################
5    + #
6    + #
7    + # Monster Search and Filter Python Module
8    + # Version 1.0
9    + #
10   + # NOTE: Module code starts after the partial MIDI.py module @ line 1059
11   + #
12   + # Based upon MIDI.py module v.6.7. by Peter Billam / pjb.com.au
13   + #
14   + # Project Los Angeles
15   + #
16   + # Tegridy Code 2025
17   + #
18   + # https://github.com/Tegridy-Code/Project-Los-Angeles
19   + #
20   + #
21   + ###################################################################################
22   + ###################################################################################
23   + #
24   + # Copyright 2025 Project Los Angeles / Tegridy Code
25   + #
26   + # Licensed under the Apache License, Version 2.0 (the "License");
27   + # you may not use this file except in compliance with the License.
28   + # You may obtain a copy of the License at
29   + #
30   + #     http://www.apache.org/licenses/LICENSE-2.0
31   + #
32   + # Unless required by applicable law or agreed to in writing, software
33   + # distributed under the License is distributed on an "AS IS" BASIS,
34   + # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
35   + # See the License for the specific language governing permissions and
36   + # limitations under the License.
37   + #
38   + ###################################################################################
39   + ###################################################################################
40   + #
41   + # PARTIAL MIDI.py Module v.6.7. by Peter Billam
42   + # Please see TMIDI 2.3/tegridy-tools repo for full MIDI.py module code
43   + #
44   + # Or you can always download the latest full version from:
45   + #
46   + # https://pjb.com.au/
47   + # https://peterbillam.gitlab.io/miditools/
48   + #
49   + # Copyright 2020 Peter Billam
50   + #
51   + ###################################################################################
52   + ###################################################################################
53   + #
54   + # Critical dependencies
55   + #
56   + # !pip install cupy-cuda12x
57   + # !pip install numpy==1.24.4
58   + #
59   + ###################################################################################
60   + ###################################################################################
61   + #
62   + # Basic use example
63   + #
64   + # import monster_search_and_filter
65   + #
66   + # sigs_data_path = './Monster-MIDI-Dataset/SIGNATURES_DATA/MONSTER_SIGNATURES_DATA.pickle'
67   + #
68   + # sigs_data = monster_search_and_filter.load_pickle(sigs_data_path)
69   + # sigs_dicts = monster_search_and_filter.load_signatures(sigs_data)
70   + # X, global_union = monster_search_and_filter.precompute_signatures(sigs_dicts)
71   + #
72   + # monster_search_and_filter.search_and_filter(sigs_dicts, X, global_union)
73   + #
74   + ###################################################################################
75   + '''
76   +
77   + ###################################################################################
78   +
79   + print('=' * 70)
80   + print('Loading module...')
81   + print('Please wait...')
82   + print('=' * 70)
83   +
84   + ###################################################################################
85   +
86   + import sys, struct, copy
87   +
88   + Version = '6.7'
89   + VersionDate = '20201120'
90   +
91   + _previous_warning = ''  # 5.4
92   + _previous_times = 0  # 5.4
93   + _no_warning = False
94   +
95   + #------------------------------- Encoding stuff --------------------------
96   +
97   + def score2opus(score=None, text_encoding='ISO-8859-1'):
98   +     r'''
99   +     The argument is a list: the first item in the list is the "ticks"
100  +     parameter, the others are the tracks. Each track is a list
101  +     of score-events, and each event is itself a list. A score-event
102  +     is similar to an opus-event (see above), except that in a score:
103  +        1) the times are expressed as an absolute number of ticks
104  +           from the track's start time
105  +        2) the pairs of 'note_on' and 'note_off' events in an "opus"
106  +           are abstracted into a single 'note' event in a "score":
107  +           ['note', start_time, duration, channel, pitch, velocity]
108  +     score2opus() returns a list specifying the equivalent "opus".
109  +
110  +     my_score = [
111  +         96,
112  +         [   # track 0:
113  +             ['patch_change', 0, 1, 8],
114  +             ['note', 5, 96, 1, 25, 96],
115  +             ['note', 101, 96, 1, 29, 96]
116  +         ],   # end of track 0
117  +     ]
118  +     my_opus = score2opus(my_score)
119  +     '''
120  +     if len(score) < 2:
121  +         score=[1000, [],]
122  +     tracks = copy.deepcopy(score)
123  +     ticks = int(tracks.pop(0))
124  +     opus_tracks = []
125  +     for scoretrack in tracks:
126  +         time2events = dict([])
127  +         for scoreevent in scoretrack:
128  +             if scoreevent[0] == 'note':
129  +                 note_on_event = ['note_on',scoreevent[1],
130  +                     scoreevent[3],scoreevent[4],scoreevent[5]]
131  +                 note_off_event = ['note_off',scoreevent[1]+scoreevent[2],
132  +                     scoreevent[3],scoreevent[4],scoreevent[5]]
133  +                 if time2events.get(note_on_event[1]):
134  +                     time2events[note_on_event[1]].append(note_on_event)
135  +                 else:
136  +                     time2events[note_on_event[1]] = [note_on_event,]
137  +                 if time2events.get(note_off_event[1]):
138  +                     time2events[note_off_event[1]].append(note_off_event)
139  +                 else:
140  +                     time2events[note_off_event[1]] = [note_off_event,]
141  +                 continue
142  +             if time2events.get(scoreevent[1]):
143  +                 time2events[scoreevent[1]].append(scoreevent)
144  +             else:
145  +                 time2events[scoreevent[1]] = [scoreevent,]
146  +
147  +         sorted_times = []  # list of keys
148  +         for k in time2events.keys():
149  +             sorted_times.append(k)
150  +         sorted_times.sort()
151  +
152  +         sorted_events = []  # once-flattened list of values sorted by key
153  +         for time in sorted_times:
154  +             sorted_events.extend(time2events[time])
155  +
156  +         abs_time = 0
157  +         for event in sorted_events:  # convert abs times => delta times
158  +             delta_time = event[1] - abs_time
159  +             abs_time = event[1]
160  +             event[1] = delta_time
161  +         opus_tracks.append(sorted_events)
162  +     opus_tracks.insert(0,ticks)
163  +     _clean_up_warnings()
164  +     return opus_tracks
165  +
166  + #--------------------------- Decoding stuff ------------------------
167  +
168  + def midi2opus(midi=b'', do_not_check_MIDI_signature=False):
169  +     r'''Translates MIDI into a "opus". For a description of the
170  +     "opus" format, see opus2midi()
171  +     '''
172  +     my_midi=bytearray(midi)
173  +     if len(my_midi) < 4:
174  +         _clean_up_warnings()
175  +         return [1000,[],]
176  +     id = bytes(my_midi[0:4])
177  +     if id != b'MThd':
178  +         _warn("midi2opus: midi starts with "+str(id)+" instead of 'MThd'")
179  +         _clean_up_warnings()
180  +         if do_not_check_MIDI_signature == False:
181  +             return [1000,[],]
182  +     [length, format, tracks_expected, ticks] = struct.unpack(
183  +         '>IHHH', bytes(my_midi[4:14]))
184  +     if length != 6:
185  +         _warn("midi2opus: midi header length was "+str(length)+" instead of 6")
186  +         _clean_up_warnings()
187  +         return [1000,[],]
188  +     my_opus = [ticks,]
189  +     my_midi = my_midi[14:]
190  +     track_num = 1  # 5.1
191  +     while len(my_midi) >= 8:
192  +         track_type = bytes(my_midi[0:4])
193  +         if track_type != b'MTrk':
194  +             #_warn('midi2opus: Warning: track #'+str(track_num)+' type is '+str(track_type)+" instead of b'MTrk'")
195  +             pass
196  +         [track_length] = struct.unpack('>I', my_midi[4:8])
197  +         my_midi = my_midi[8:]
198  +         if track_length > len(my_midi):
199  +             _warn('midi2opus: track #'+str(track_num)+' length '+str(track_length)+' is too large')
200  +             _clean_up_warnings()
201  +             return my_opus  # 5.0
202  +         my_midi_track = my_midi[0:track_length]
203  +         my_track = _decode(my_midi_track)
204  +         my_opus.append(my_track)
205  +         my_midi = my_midi[track_length:]
206  +         track_num += 1  # 5.1
207  +     _clean_up_warnings()
208  +     return my_opus
209  +
210  + def opus2score(opus=[]):
211  +     r'''For a description of the "opus" and "score" formats,
212  +     see opus2midi() and score2opus().
213  +     '''
214  +     if len(opus) < 2:
215  +         _clean_up_warnings()
216  +         return [1000,[],]
217  +     tracks = copy.deepcopy(opus)  # couple of slices probably quicker...
218  +     ticks = int(tracks.pop(0))
219  +     score = [ticks,]
220  +     for opus_track in tracks:
221  +         ticks_so_far = 0
222  +         score_track = []
223  +         chapitch2note_on_events = dict([])  # 4.0
224  +         for opus_event in opus_track:
225  +             ticks_so_far += opus_event[1]
226  +             if opus_event[0] == 'note_off' or (opus_event[0] == 'note_on' and opus_event[4] == 0):  # 4.8
227  +                 cha = opus_event[2]
228  +                 pitch = opus_event[3]
229  +                 key = cha*128 + pitch
230  +                 if chapitch2note_on_events.get(key):
231  +                     new_event = chapitch2note_on_events[key].pop(0)
232  +                     new_event[2] = ticks_so_far - new_event[1]
233  +                     score_track.append(new_event)
234  +                 elif pitch > 127:
235  +                     pass  #_warn('opus2score: note_off with no note_on, bad pitch='+str(pitch))
236  +                 else:
237  +                     pass  #_warn('opus2score: note_off with no note_on cha='+str(cha)+' pitch='+str(pitch))
238  +             elif opus_event[0] == 'note_on':
239  +                 cha = opus_event[2]
240  +                 pitch = opus_event[3]
241  +                 key = cha*128 + pitch
242  +                 new_event = ['note',ticks_so_far,0,cha,pitch, opus_event[4]]
243  +                 if chapitch2note_on_events.get(key):
244  +                     chapitch2note_on_events[key].append(new_event)
245  +                 else:
246  +                     chapitch2note_on_events[key] = [new_event,]
247  +             else:
248  +                 opus_event[1] = ticks_so_far
249  +                 score_track.append(opus_event)
250  +         # check for unterminated notes (Oisín) -- 5.2
251  +         for chapitch in chapitch2note_on_events:
252  +             note_on_events = chapitch2note_on_events[chapitch]
253  +             for new_e in note_on_events:
254  +                 new_e[2] = ticks_so_far - new_e[1]
255  +                 score_track.append(new_e)
256  +                 pass  #_warn("opus2score: note_on with no note_off cha="+str(new_e[3])+' pitch='+str(new_e[4])+'; adding note_off at end')
257  +         score.append(score_track)
258  +     _clean_up_warnings()
259  +     return score
260  +
261  + def midi2score(midi=b'', do_not_check_MIDI_signature=False):
262  +     r'''
263  +     Translates MIDI into a "score", using midi2opus() then opus2score()
264  +     '''
265  +     return opus2score(midi2opus(midi, do_not_check_MIDI_signature))
266  +
267  + def midi2ms_score(midi=b'', do_not_check_MIDI_signature=False):
268  +     r'''
269  +     Translates MIDI into a "score" with one beat per second and one
270  +     tick per millisecond, using midi2opus() then to_millisecs()
271  +     then opus2score()
272  +     '''
273  +     return opus2score(to_millisecs(midi2opus(midi, do_not_check_MIDI_signature)))
274  +
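A usage sketch: decoding raw MIDI bytes into a millisecond-resolution score (the file path is hypothetical):

    with open('./some_song.mid', 'rb') as f:   # hypothetical MIDI file
        midi_bytes = f.read()

    ms_score = midi2ms_score(midi_bytes)
    print(ms_score[0])       # division: 1000 ticks, i.e. one tick per millisecond
    print(ms_score[1][:3])   # first few events of the first track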
275  + def midi2single_track_ms_score(midi_path_or_bytes,
276  +                                recalculate_channels = False,
277  +                                pass_old_timings_events= False,
278  +                                verbose = False,
279  +                                do_not_check_MIDI_signature=False
280  +                                ):
281  +     r'''
282  +     Translates MIDI into a single track "score" with 16 instruments and one beat per second and one
283  +     tick per millisecond
284  +     '''
285  +
286  +     if type(midi_path_or_bytes) == bytes:
287  +         midi_data = midi_path_or_bytes
288  +
289  +     elif type(midi_path_or_bytes) == str:
290  +         midi_data = open(midi_path_or_bytes, 'rb').read()
291  +
292  +     score = midi2score(midi_data, do_not_check_MIDI_signature)
293  +
294  +     if recalculate_channels:
295  +
296  +         events_matrixes = []
297  +
298  +         itrack = 1
299  +         events_matrixes_channels = []
300  +         while itrack < len(score):
301  +             events_matrix = []
302  +             for event in score[itrack]:
303  +                 if event[0] == 'note' and event[3] != 9:
304  +                     event[3] = (16 * (itrack-1)) + event[3]
305  +                     if event[3] not in events_matrixes_channels:
306  +                         events_matrixes_channels.append(event[3])
307  +
308  +                 events_matrix.append(event)
309  +             events_matrixes.append(events_matrix)
310  +             itrack += 1
311  +
312  +         events_matrix1 = []
313  +         for e in events_matrixes:
314  +             events_matrix1.extend(e)
315  +
316  +         if verbose:
317  +             if len(events_matrixes_channels) > 16:
318  +                 print('MIDI has', len(events_matrixes_channels), 'instruments!', len(events_matrixes_channels) - 16, 'instrument(s) will be removed!')
319  +
320  +         for e in events_matrix1:
321  +             if e[0] == 'note' and e[3] != 9:
322  +                 if e[3] in events_matrixes_channels[:15]:
323  +                     if events_matrixes_channels[:15].index(e[3]) < 9:
324  +                         e[3] = events_matrixes_channels[:15].index(e[3])
325  +                     else:
326  +                         e[3] = events_matrixes_channels[:15].index(e[3])+1
327  +                 else:
328  +                     events_matrix1.remove(e)
329  +
330  +             if e[0] in ['patch_change', 'control_change', 'channel_after_touch', 'key_after_touch', 'pitch_wheel_change'] and e[2] != 9:
331  +                 if e[2] in [e % 16 for e in events_matrixes_channels[:15]]:
332  +                     if [e % 16 for e in events_matrixes_channels[:15]].index(e[2]) < 9:
333  +                         e[2] = [e % 16 for e in events_matrixes_channels[:15]].index(e[2])
334  +                     else:
335  +                         e[2] = [e % 16 for e in events_matrixes_channels[:15]].index(e[2])+1
336  +                 else:
337  +                     events_matrix1.remove(e)
338  +
339  +     else:
340  +         events_matrix1 = []
341  +         itrack = 1
342  +
343  +         while itrack < len(score):
344  +             for event in score[itrack]:
345  +                 events_matrix1.append(event)
346  +             itrack += 1
347  +
348  +     opus = score2opus([score[0], events_matrix1])
349  +     ms_score = opus2score(to_millisecs(opus, pass_old_timings_events=pass_old_timings_events))
350  +
351  +     return ms_score
352  +
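A usage sketch (path hypothetical); the function accepts either a path or raw bytes and flattens everything into a single track:

    single_track_score = midi2single_track_ms_score('./some_song.mid',
                                                    recalculate_channels=True,
                                                    verbose=True)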
353  + #------------------------ Other Transformations ---------------------
354  +
355  + def to_millisecs(old_opus=None, desired_time_in_ms=1, pass_old_timings_events = False):
356  +     r'''Recalibrates all the times in an "opus" to use one beat
357  +     per second and one tick per millisecond. This makes it
358  +     hard to retrieve any information about beats or barlines,
359  +     but it does make it easy to mix different scores together.
360  +     '''
361  +     if old_opus == None:
362  +         return [1000 * desired_time_in_ms,[],]
363  +     try:
364  +         old_tpq = int(old_opus[0])
365  +     except IndexError:  # 5.0
366  +         _warn('to_millisecs: the opus '+str(type(old_opus))+' has no elements')
367  +         return [1000 * desired_time_in_ms,[],]
368  +     new_opus = [1000 * desired_time_in_ms,]
369  +     # 6.7 first go through building a table of set_tempos by absolute-tick
370  +     ticks2tempo = {}
371  +     itrack = 1
372  +     while itrack < len(old_opus):
373  +         ticks_so_far = 0
374  +         for old_event in old_opus[itrack]:
375  +             if old_event[0] == 'note':
376  +                 raise TypeError('to_millisecs needs an opus, not a score')
377  +             ticks_so_far += old_event[1]
378  +             if old_event[0] == 'set_tempo':
379  +                 ticks2tempo[ticks_so_far] = old_event[2]
380  +         itrack += 1
381  +     # then get the sorted-array of their keys
382  +     tempo_ticks = []  # list of keys
383  +     for k in ticks2tempo.keys():
384  +         tempo_ticks.append(k)
385  +     tempo_ticks.sort()
386  +     # then go through converting to millisec, testing if the next
387  +     # set_tempo lies before the next track-event, and using it if so.
388  +     itrack = 1
389  +     while itrack < len(old_opus):
390  +         ms_per_old_tick = 400 / old_tpq  # float: will round later 6.3
391  +         i_tempo_ticks = 0
392  +         ticks_so_far = 0
393  +         ms_so_far = 0.0
394  +         previous_ms_so_far = 0.0
395  +
396  +         if pass_old_timings_events:
397  +             new_track = [['set_tempo',0,1000000 * desired_time_in_ms],['old_tpq', 0, old_tpq]]  # new "crochet" is 1 sec
398  +         else:
399  +             new_track = [['set_tempo',0,1000000 * desired_time_in_ms],]  # new "crochet" is 1 sec
400  +         for old_event in old_opus[itrack]:
401  +             # detect if ticks2tempo has something before this event
402  +             # 20160702 if ticks2tempo is at the same time, leave it
403  +             event_delta_ticks = old_event[1] * desired_time_in_ms
404  +             if (i_tempo_ticks < len(tempo_ticks) and
405  +                 tempo_ticks[i_tempo_ticks] < (ticks_so_far + old_event[1]) * desired_time_in_ms):
406  +                 delta_ticks = tempo_ticks[i_tempo_ticks] - ticks_so_far
407  +                 ms_so_far += (ms_per_old_tick * delta_ticks * desired_time_in_ms)
408  +                 ticks_so_far = tempo_ticks[i_tempo_ticks]
409  +                 ms_per_old_tick = ticks2tempo[ticks_so_far] / (1000.0*old_tpq * desired_time_in_ms)
410  +                 i_tempo_ticks += 1
411  +                 event_delta_ticks -= delta_ticks
412  +             new_event = copy.deepcopy(old_event)  # now handle the new event
413  +             ms_so_far += (ms_per_old_tick * old_event[1] * desired_time_in_ms)
414  +             new_event[1] = round(ms_so_far - previous_ms_so_far)
415  +
416  +             if pass_old_timings_events:
417  +                 if old_event[0] != 'set_tempo':
418  +                     previous_ms_so_far = ms_so_far
419  +                     new_track.append(new_event)
420  +                 else:
421  +                     new_event[0] = 'old_set_tempo'
422  +                     previous_ms_so_far = ms_so_far
423  +                     new_track.append(new_event)
424  +             else:
425  +                 if old_event[0] != 'set_tempo':
426  +                     previous_ms_so_far = ms_so_far
427  +                     new_track.append(new_event)
428  +             ticks_so_far += event_delta_ticks
429  +         new_opus.append(new_track)
430  +         itrack += 1
431  +     _clean_up_warnings()
432  +     return new_opus
433  +
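For example, converting a freshly decoded opus to millisecond timing (midi_bytes as in the earlier sketch):

    ms_opus = to_millisecs(midi2opus(midi_bytes))
    print(ms_opus[0])   # 1000: one tick now equals one millisecond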
| 434 |
+
#----------------------------- Event stuff --------------------------
|
| 435 |
+
|
| 436 |
+
_sysex2midimode = {
|
| 437 |
+
"\x7E\x7F\x09\x01\xF7": 1,
|
| 438 |
+
"\x7E\x7F\x09\x02\xF7": 0,
|
| 439 |
+
"\x7E\x7F\x09\x03\xF7": 2,
|
| 440 |
+
}
|
| 441 |
+
|
| 442 |
+
# Some public-access tuples:
|
| 443 |
+
MIDI_events = tuple('''note_off note_on key_after_touch
|
| 444 |
+
control_change patch_change channel_after_touch
|
| 445 |
+
pitch_wheel_change'''.split())
|
| 446 |
+
|
| 447 |
+
Text_events = tuple('''text_event copyright_text_event
|
| 448 |
+
track_name instrument_name lyric marker cue_point text_event_08
|
| 449 |
+
text_event_09 text_event_0a text_event_0b text_event_0c
|
| 450 |
+
text_event_0d text_event_0e text_event_0f'''.split())
|
| 451 |
+
|
| 452 |
+
Nontext_meta_events = tuple('''end_track set_tempo
|
| 453 |
+
smpte_offset time_signature key_signature sequencer_specific
|
| 454 |
+
raw_meta_event sysex_f0 sysex_f7 song_position song_select
|
| 455 |
+
tune_request'''.split())
|
| 456 |
+
# unsupported: raw_data
|
| 457 |
+
|
| 458 |
+
# Actually, 'tune_request' is is F-series event, not strictly a meta-event...
|
| 459 |
+
Meta_events = Text_events + Nontext_meta_events
|
| 460 |
+
All_events = MIDI_events + Meta_events
|
| 461 |
+
|
| 462 |
+
# And three dictionaries:
|
| 463 |
+
Number2patch = { # General MIDI patch numbers:
0:'Acoustic Grand',
1:'Bright Acoustic',
2:'Electric Grand',
3:'Honky-Tonk',
4:'Electric Piano 1',
5:'Electric Piano 2',
6:'Harpsichord',
7:'Clav',
8:'Celesta',
9:'Glockenspiel',
10:'Music Box',
11:'Vibraphone',
12:'Marimba',
13:'Xylophone',
14:'Tubular Bells',
15:'Dulcimer',
16:'Drawbar Organ',
17:'Percussive Organ',
18:'Rock Organ',
19:'Church Organ',
20:'Reed Organ',
21:'Accordion',
22:'Harmonica',
23:'Tango Accordion',
24:'Acoustic Guitar(nylon)',
25:'Acoustic Guitar(steel)',
26:'Electric Guitar(jazz)',
27:'Electric Guitar(clean)',
28:'Electric Guitar(muted)',
29:'Overdriven Guitar',
30:'Distortion Guitar',
31:'Guitar Harmonics',
32:'Acoustic Bass',
33:'Electric Bass(finger)',
34:'Electric Bass(pick)',
35:'Fretless Bass',
36:'Slap Bass 1',
37:'Slap Bass 2',
38:'Synth Bass 1',
39:'Synth Bass 2',
40:'Violin',
41:'Viola',
42:'Cello',
43:'Contrabass',
44:'Tremolo Strings',
45:'Pizzicato Strings',
46:'Orchestral Harp',
47:'Timpani',
48:'String Ensemble 1',
49:'String Ensemble 2',
50:'SynthStrings 1',
51:'SynthStrings 2',
52:'Choir Aahs',
53:'Voice Oohs',
54:'Synth Voice',
55:'Orchestra Hit',
56:'Trumpet',
57:'Trombone',
58:'Tuba',
59:'Muted Trumpet',
60:'French Horn',
61:'Brass Section',
62:'SynthBrass 1',
63:'SynthBrass 2',
64:'Soprano Sax',
65:'Alto Sax',
66:'Tenor Sax',
67:'Baritone Sax',
68:'Oboe',
69:'English Horn',
70:'Bassoon',
71:'Clarinet',
72:'Piccolo',
73:'Flute',
74:'Recorder',
75:'Pan Flute',
76:'Blown Bottle',
77:'Skakuhachi',
78:'Whistle',
79:'Ocarina',
80:'Lead 1 (square)',
81:'Lead 2 (sawtooth)',
82:'Lead 3 (calliope)',
83:'Lead 4 (chiff)',
84:'Lead 5 (charang)',
85:'Lead 6 (voice)',
86:'Lead 7 (fifths)',
87:'Lead 8 (bass+lead)',
88:'Pad 1 (new age)',
89:'Pad 2 (warm)',
90:'Pad 3 (polysynth)',
91:'Pad 4 (choir)',
92:'Pad 5 (bowed)',
93:'Pad 6 (metallic)',
94:'Pad 7 (halo)',
95:'Pad 8 (sweep)',
96:'FX 1 (rain)',
97:'FX 2 (soundtrack)',
98:'FX 3 (crystal)',
99:'FX 4 (atmosphere)',
100:'FX 5 (brightness)',
101:'FX 6 (goblins)',
102:'FX 7 (echoes)',
103:'FX 8 (sci-fi)',
104:'Sitar',
105:'Banjo',
106:'Shamisen',
107:'Koto',
108:'Kalimba',
109:'Bagpipe',
110:'Fiddle',
111:'Shanai',
112:'Tinkle Bell',
113:'Agogo',
114:'Steel Drums',
115:'Woodblock',
116:'Taiko Drum',
117:'Melodic Tom',
118:'Synth Drum',
119:'Reverse Cymbal',
120:'Guitar Fret Noise',
121:'Breath Noise',
122:'Seashore',
123:'Bird Tweet',
124:'Telephone Ring',
125:'Helicopter',
126:'Applause',
127:'Gunshot',
}

Notenum2percussion = { # General MIDI Percussion (on Channel 9):
35:'Acoustic Bass Drum',
36:'Bass Drum 1',
37:'Side Stick',
38:'Acoustic Snare',
39:'Hand Clap',
40:'Electric Snare',
41:'Low Floor Tom',
42:'Closed Hi-Hat',
43:'High Floor Tom',
44:'Pedal Hi-Hat',
45:'Low Tom',
46:'Open Hi-Hat',
47:'Low-Mid Tom',
48:'Hi-Mid Tom',
49:'Crash Cymbal 1',
50:'High Tom',
51:'Ride Cymbal 1',
52:'Chinese Cymbal',
53:'Ride Bell',
54:'Tambourine',
55:'Splash Cymbal',
56:'Cowbell',
57:'Crash Cymbal 2',
58:'Vibraslap',
59:'Ride Cymbal 2',
60:'Hi Bongo',
61:'Low Bongo',
62:'Mute Hi Conga',
63:'Open Hi Conga',
64:'Low Conga',
65:'High Timbale',
66:'Low Timbale',
67:'High Agogo',
68:'Low Agogo',
69:'Cabasa',
70:'Maracas',
71:'Short Whistle',
72:'Long Whistle',
73:'Short Guiro',
74:'Long Guiro',
75:'Claves',
76:'Hi Wood Block',
77:'Low Wood Block',
78:'Mute Cuica',
79:'Open Cuica',
80:'Mute Triangle',
81:'Open Triangle',
}

Event2channelindex = { 'note':3, 'note_off':2, 'note_on':2,
 'key_after_touch':2, 'control_change':2, 'patch_change':2,
 'channel_after_touch':2, 'pitch_wheel_change':2
}

################################################################
# The code below this line is full of frightening things, all to
# do with the actual encoding and decoding of binary MIDI data.

def _twobytes2int(byte_a):
    r'''decode a 16 bit quantity from two bytes,'''
    return (byte_a[1] | (byte_a[0] << 8))

def _int2twobytes(int_16bit):
    r'''encode a 16 bit quantity into two bytes,'''
    return bytes([(int_16bit>>8) & 0xFF, int_16bit & 0xFF])

def _read_14_bit(byte_a):
    r'''decode a 14 bit quantity from two bytes,'''
    return (byte_a[0] | (byte_a[1] << 7))

def _write_14_bit(int_14bit):
    r'''encode a 14 bit quantity into two bytes,'''
    return bytes([int_14bit & 0x7F, (int_14bit>>7) & 0x7F])

def _ber_compressed_int(integer):
    r'''BER compressed integer (not an ASN.1 BER, see perlpacktut for
    details). Its bytes represent an unsigned integer in base 128,
    most significant digit first, with as few digits as possible.
    Bit eight (the high bit) is set on each byte except the last.
    '''
    ber = bytearray(b'')
    seven_bits = 0x7F & integer
    ber.insert(0, seven_bits) # XXX surely should convert to a char ?
    integer >>= 7
    while integer > 0:
        seven_bits = 0x7F & integer
        ber.insert(0, 0x80|seven_bits) # XXX surely should convert to a char ?
        integer >>= 7
    return ber

def _unshift_ber_int(ba):
    r'''Given a bytearray, returns a tuple of (the ber-integer at the
    start, and the remainder of the bytearray).
    '''
    if not len(ba): # 6.7
        _warn('_unshift_ber_int: no integer found')
        return ((0, b""))
    byte = ba[0]
    ba = ba[1:]
    integer = 0
    while True:
        integer += (byte & 0x7F)
        if not (byte & 0x80):
            return ((integer, ba))
        if not len(ba):
            _warn('_unshift_ber_int: no end-of-integer found')
            return ((0, ba))
        byte = ba[0]
        ba = ba[1:]
        integer <<= 7


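# A quick sanity check (a sketch, not executed on import): the BER and
# fixed-width helpers above are exact inverses of each other:
#
#   _unshift_ber_int(_ber_compressed_int(480))   # -> (480, bytearray(b''))
#   _read_14_bit(_write_14_bit(0x2000))          # -> 0x2000 (pitch-wheel center)
#   _twobytes2int(_int2twobytes(12345))          # -> 12345
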
def _clean_up_warnings(): # 5.4
    # Call this before returning from any publicly callable function
    # whenever there's a possibility that a warning might have been printed
    # by the function, or by any private functions it might have called.
    if _no_warning:
        return
    global _previous_times
    global _previous_warning
    if _previous_times > 1:
        # E:1176, 0: invalid syntax (<string>, line 1176) (syntax-error) ???
        # print(' previous message repeated '+str(_previous_times)+' times', file=sys.stderr)
        # 6.7
        sys.stderr.write(' previous message repeated {0} times\n'.format(_previous_times))
    elif _previous_times > 0:
        sys.stderr.write(' previous message repeated\n')
    _previous_times = 0
    _previous_warning = ''


def _warn(s=''):
    if _no_warning:
        return
    global _previous_times
    global _previous_warning
    if s == _previous_warning: # 5.4
        _previous_times = _previous_times + 1
    else:
        _clean_up_warnings()
        sys.stderr.write(str(s) + "\n")
        _previous_warning = s


def _some_text_event(which_kind=0x01, text=b'some_text', text_encoding='ISO-8859-1'):
    if str(type(text)).find("'str'") >= 0: # 6.4 test for back-compatibility
        data = bytes(text, encoding=text_encoding)
    else:
        data = bytes(text)
    return b'\xFF' + bytes((which_kind,)) + _ber_compressed_int(len(data)) + data

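# Example (illustrative only): building a 'track_name' meta-event by hand.
# 0x03 is the track-name meta-event type; the length is BER-encoded:
#
#   _some_text_event(0x03, b'Piano')   # -> b'\xff\x03\x05Piano'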

def _consistentise_ticks(scores): # 3.6
    # used by mix_scores, merge_scores, concatenate_scores
    if len(scores) == 1:
        return copy.deepcopy(scores)
    are_consistent = True
    ticks = scores[0][0]
    iscore = 1
    while iscore < len(scores):
        if scores[iscore][0] != ticks:
            are_consistent = False
            break
        iscore += 1
    if are_consistent:
        return copy.deepcopy(scores)
    new_scores = []
    iscore = 0
    while iscore < len(scores):
        score = scores[iscore]
        new_scores.append(opus2score(to_millisecs(score2opus(score))))
        iscore += 1
    return new_scores


###########################################################################
def _decode(trackdata=b'', exclude=None, include=None,
            event_callback=None, exclusive_event_callback=None, no_eot_magic=False):
    r'''Decodes MIDI track data into an opus-style list of events.
    The options:
      'exclude' is a list of event types which will be ignored SHOULD BE A SET
      'include' (and no exclude), makes exclude a list
        of all possible events, /minus/ what include specifies
      'event_callback' is a coderef
      'exclusive_event_callback' is a coderef
    '''
    trackdata = bytearray(trackdata)
    if exclude == None:
        exclude = []
    if include == None:
        include = []
    if include and not exclude:
        exclude = All_events
    include = set(include)
    exclude = set(exclude)

    # Pointer = 0; not used here; we eat through the bytearray instead.
    event_code = -1; # used for running status
    event_count = 0;
    events = []

    while (len(trackdata)):
        # loop while there's anything to analyze ...
        eot = False # When True, the event registrar aborts this loop
        event_count += 1

        E = []
        # E for events - we'll feed it to the event registrar at the end.

        # Slice off the delta time code, and analyze it
        [time, trackdata] = _unshift_ber_int(trackdata)

        # Now let's see what we can make of the command
        first_byte = trackdata[0] & 0xFF
        trackdata = trackdata[1:]
        if (first_byte < 0xF0): # It's a MIDI event
            if (first_byte & 0x80):
                event_code = first_byte
            else:
                # It wants running status; use last event_code value
                trackdata.insert(0, first_byte)
                if (event_code == -1):
                    _warn("Running status not set; Aborting track.")
                    return []

            command = event_code & 0xF0
            channel = event_code & 0x0F

            if (command == 0xF6): # 0-byte argument
                pass
            elif (command == 0xC0 or command == 0xD0): # 1-byte argument
                parameter = trackdata[0] # could be B
                trackdata = trackdata[1:]
            else: # 2-byte argument could be BB or 14-bit
                parameter = (trackdata[0], trackdata[1])
                trackdata = trackdata[2:]

            #################################################################
            # MIDI events

            if (command == 0x80):
                if 'note_off' in exclude:
                    continue
                E = ['note_off', time, channel, parameter[0], parameter[1]]
            elif (command == 0x90):
                if 'note_on' in exclude:
                    continue
                E = ['note_on', time, channel, parameter[0], parameter[1]]
            elif (command == 0xA0):
                if 'key_after_touch' in exclude:
                    continue
                E = ['key_after_touch', time, channel, parameter[0], parameter[1]]
            elif (command == 0xB0):
                if 'control_change' in exclude:
                    continue
                E = ['control_change', time, channel, parameter[0], parameter[1]]
            elif (command == 0xC0):
                if 'patch_change' in exclude:
                    continue
                E = ['patch_change', time, channel, parameter]
            elif (command == 0xD0):
                if 'channel_after_touch' in exclude:
                    continue
                E = ['channel_after_touch', time, channel, parameter]
            elif (command == 0xE0):
                if 'pitch_wheel_change' in exclude:
                    continue
                E = ['pitch_wheel_change', time, channel,
                     _read_14_bit(parameter) - 0x2000]
            else:
                _warn("Shouldn't get here; command=" + hex(command))

        elif (first_byte == 0xFF): # It's a Meta-Event! ##################
            # [command, length, remainder] =
            #    unpack("xCwa*", substr(trackdata, $Pointer, 6));
            # Pointer += 6 - len(remainder);
            #    # Move past JUST the length-encoded.
            command = trackdata[0] & 0xFF
            trackdata = trackdata[1:]
            [length, trackdata] = _unshift_ber_int(trackdata)
            if (command == 0x00):
                if (length == 2):
                    E = ['set_sequence_number', time, _twobytes2int(trackdata)]
                else:
                    _warn('set_sequence_number: length must be 2, not ' + str(length))
                    E = ['set_sequence_number', time, 0]

            elif command >= 0x01 and command <= 0x0f: # Text events
                # 6.2 take it in bytes; let the user get the right encoding.
                # text_str = trackdata[0:length].decode('ascii','ignore')
                # text_str = trackdata[0:length].decode('ISO-8859-1')
                # 6.4 take it in bytes; let the user get the right encoding.
                text_data = bytes(trackdata[0:length]) # 6.4
                # Defined text events
                if (command == 0x01):
                    E = ['text_event', time, text_data]
                elif (command == 0x02):
                    E = ['copyright_text_event', time, text_data]
                elif (command == 0x03):
                    E = ['track_name', time, text_data]
                elif (command == 0x04):
                    E = ['instrument_name', time, text_data]
                elif (command == 0x05):
                    E = ['lyric', time, text_data]
                elif (command == 0x06):
                    E = ['marker', time, text_data]
                elif (command == 0x07):
                    E = ['cue_point', time, text_data]
                # Reserved but apparently unassigned text events
                elif (command == 0x08):
                    E = ['text_event_08', time, text_data]
                elif (command == 0x09):
                    E = ['text_event_09', time, text_data]
                elif (command == 0x0a):
                    E = ['text_event_0a', time, text_data]
                elif (command == 0x0b):
                    E = ['text_event_0b', time, text_data]
                elif (command == 0x0c):
                    E = ['text_event_0c', time, text_data]
                elif (command == 0x0d):
                    E = ['text_event_0d', time, text_data]
                elif (command == 0x0e):
                    E = ['text_event_0e', time, text_data]
                elif (command == 0x0f):
                    E = ['text_event_0f', time, text_data]

            # Now the sticky events -------------------------------------
            elif (command == 0x2F):
                E = ['end_track', time]
                # The code for handling this, oddly, comes LATER,
                # in the event registrar.
            elif (command == 0x51): # DTime, Microseconds/Crochet
                if length != 3:
                    _warn('set_tempo event, but length=' + str(length))
                E = ['set_tempo', time,
                     struct.unpack(">I", b'\x00' + trackdata[0:3])[0]]
            elif (command == 0x54):
                if length != 5: # DTime, HR, MN, SE, FR, FF
                    _warn('smpte_offset event, but length=' + str(length))
                E = ['smpte_offset', time] + list(struct.unpack(">BBBBB", trackdata[0:5]))
            elif (command == 0x58):
                if length != 4: # DTime, NN, DD, CC, BB
                    _warn('time_signature event, but length=' + str(length))
                E = ['time_signature', time] + list(trackdata[0:4])
            elif (command == 0x59):
                if length != 2: # DTime, SF(signed), MI
                    _warn('key_signature event, but length=' + str(length))
                E = ['key_signature', time] + list(struct.unpack(">bB", trackdata[0:2]))
            elif (command == 0x7F): # 6.4
                E = ['sequencer_specific', time, bytes(trackdata[0:length])]
            else:
                E = ['raw_meta_event', time, command,
                     bytes(trackdata[0:length])] # 6.0
                # "[uninterpretable meta-event command of length length]"
                # DTime, Command, Binary Data
                # It's uninterpretable; record it as raw_data.

            # Pointer += length; # Now move Pointer
            trackdata = trackdata[length:]

        ######################################################################
        elif (first_byte == 0xF0 or first_byte == 0xF7):
            # Note that sysexes in MIDI /files/ are different than sysexes
            # in MIDI transmissions!! The vast majority of system exclusive
            # messages will just use the F0 format. For instance, the
            # transmitted message F0 43 12 00 07 F7 would be stored in a
            # MIDI file as F0 05 43 12 00 07 F7. As mentioned above, it is
            # required to include the F7 at the end so that the reader of the
            # MIDI file knows that it has read the entire message. (But the F7
            # is omitted if this is a non-final block in a multiblock sysex;
            # but the F7 (if there) is counted in the message's declared
            # length, so we don't have to think about it anyway.)
            # command = trackdata.pop(0)
            [length, trackdata] = _unshift_ber_int(trackdata)
            if first_byte == 0xF0:
                # 20091008 added ISO-8859-1 to get an 8-bit str
                # 6.4 return bytes instead
                E = ['sysex_f0', time, bytes(trackdata[0:length])]
            else:
                E = ['sysex_f7', time, bytes(trackdata[0:length])]
            trackdata = trackdata[length:]

        ######################################################################
        # Now, the MIDI file spec says:
        #  <track data> = <MTrk event>+
        #  <MTrk event> = <delta-time> <event>
        #  <event> = <MIDI event> | <sysex event> | <meta-event>
        # I know that, on the wire, <MIDI event> can include note_on,
        # note_off, and all the other 8x to Ex events, AND Fx events
        # other than F0, F7, and FF -- namely, <song position msg>,
        # <song select msg>, and <tune request>.
        #
        # Whether these can occur in MIDI files is not clearly specified
        # by the MIDI file spec. So, I'm going to assume that
        # they CAN, in practice, occur. I don't know whether it's
        # proper for you to actually emit these into a MIDI file.

        elif (first_byte == 0xF2): # DTime, Beats
            #  <song position msg> ::= F2 <data pair>
            E = ['song_position', time, _read_14_bit(trackdata[:2])]
            trackdata = trackdata[2:]

        elif (first_byte == 0xF3): # <song select msg> ::= F3 <data singlet>
            # E = ['song_select', time, struct.unpack('>B',trackdata.pop(0))[0]]
            E = ['song_select', time, trackdata[0]]
            trackdata = trackdata[1:]
            # DTime, Thing (what?! song number? whatever ...)

        elif (first_byte == 0xF6): # DTime
            E = ['tune_request', time]
            # What would a tune request be doing in a MIDI /file/?

        #########################################################
        # ADD MORE META-EVENTS HERE. TODO:
        # f1 -- MTC Quarter Frame Message. One data byte follows
        #     the Status; it's the time code value, from 0 to 127.
        # f8 -- MIDI clock. no data.
        # fa -- MIDI start. no data.
        # fb -- MIDI continue. no data.
        # fc -- MIDI stop. no data.
        # fe -- Active sense. no data.
        # f4 f5 f9 fd -- unallocated

        r'''
        elif (first_byte > 0xF0) { # Some unknown kinda F-series event ####
            # Here we only produce a one-byte piece of raw data.
            # But the encoder for 'raw_data' accepts any length of it.
            E = [ 'raw_data',
                  time, substr(trackdata,Pointer,1) ]
            # DTime and the Data (in this case, the one Event-byte)
            ++Pointer; # itself

        '''
        elif first_byte > 0xF0: # Some unknown F-series event
            # Here we only produce a one-byte piece of raw data.
            # E = ['raw_data', time, bytest(trackdata[0])] # 6.4
            E = ['raw_data', time, trackdata[0]] # 6.4 6.7
            trackdata = trackdata[1:]
        else: # Fallthru.
            _warn("Aborting track. Command-byte first_byte=" + hex(first_byte))
            break
        # End of the big if-group

        ######################################################################
        # THE EVENT REGISTRAR...
        if E and (E[0] == 'end_track'):
            # This is the code for exceptional handling of the EOT event.
            eot = True
            if not no_eot_magic:
                if E[1] > 0: # a null text-event to carry the delta-time
                    E = ['text_event', E[1], '']
                else:
                    E = [] # EOT with a delta-time of 0; ignore it.

        if E and not (E[0] in exclude):
            # if ( $exclusive_event_callback ):
            #    &{ $exclusive_event_callback }( @E );
            # else:
            #    &{ $event_callback }( @E ) if $event_callback;
            events.append(E)
        if eot:
            break

    # End of the big "Event" while-block

    return events

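# A minimal end-to-end sketch of _decode() on a hand-built track body
# (delta-times and events only, no MThd/MTrk headers):
#
#   track = bytes([0x00, 0x90, 0x3C, 0x40,   # dt=0,  note_on  ch=0 C4 vel=64
#                  0x60, 0x80, 0x3C, 0x40,   # dt=96, note_off ch=0 C4 vel=64
#                  0x00, 0xFF, 0x2F, 0x00])  # dt=0,  end_track (eaten by EOT magic)
#   _decode(track)
#   # -> [['note_on', 0, 0, 60, 64], ['note_off', 96, 0, 60, 64]]
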
###################################################################################
###################################################################################
###################################################################################

import os

import datetime

import copy

from datetime import datetime

import secrets

import random

import pickle

import tqdm

import multiprocessing

from collections import Counter

from itertools import combinations

import sys

import statistics
import math

from collections import defaultdict

try:
    import cupy as np
    print('CuPy is found!')
    print('Will use CuPy and GPU for processing!')

except:
    import numpy as np
    print('Could not load CuPy!')
    print('Will use NumPy and CPU for processing!')

import shutil

print('=' * 70)

###################################################################################
###################################################################################

ALL_CHORDS_SORTED = [[0], [0, 2], [0, 3], [0, 4], [0, 2, 4], [0, 5], [0, 2, 5], [0, 3, 5], [0, 6],
                     [0, 2, 6], [0, 3, 6], [0, 4, 6], [0, 2, 4, 6], [0, 7], [0, 2, 7], [0, 3, 7],
                     [0, 4, 7], [0, 5, 7], [0, 2, 4, 7], [0, 2, 5, 7], [0, 3, 5, 7], [0, 8],
                     [0, 2, 8], [0, 3, 8], [0, 4, 8], [0, 5, 8], [0, 6, 8], [0, 2, 4, 8],
                     [0, 2, 5, 8], [0, 2, 6, 8], [0, 3, 5, 8], [0, 3, 6, 8], [0, 4, 6, 8],
                     [0, 2, 4, 6, 8], [0, 9], [0, 2, 9], [0, 3, 9], [0, 4, 9], [0, 5, 9], [0, 6, 9],
                     [0, 7, 9], [0, 2, 4, 9], [0, 2, 5, 9], [0, 2, 6, 9], [0, 2, 7, 9],
                     [0, 3, 5, 9], [0, 3, 6, 9], [0, 3, 7, 9], [0, 4, 6, 9], [0, 4, 7, 9],
                     [0, 5, 7, 9], [0, 2, 4, 6, 9], [0, 2, 4, 7, 9], [0, 2, 5, 7, 9],
                     [0, 3, 5, 7, 9], [0, 10], [0, 2, 10], [0, 3, 10], [0, 4, 10], [0, 5, 10],
                     [0, 6, 10], [0, 7, 10], [0, 8, 10], [0, 2, 4, 10], [0, 2, 5, 10],
                     [0, 2, 6, 10], [0, 2, 7, 10], [0, 2, 8, 10], [0, 3, 5, 10], [0, 3, 6, 10],
                     [0, 3, 7, 10], [0, 3, 8, 10], [0, 4, 6, 10], [0, 4, 7, 10], [0, 4, 8, 10],
                     [0, 5, 7, 10], [0, 5, 8, 10], [0, 6, 8, 10], [0, 2, 4, 6, 10],
                     [0, 2, 4, 7, 10], [0, 2, 4, 8, 10], [0, 2, 5, 7, 10], [0, 2, 5, 8, 10],
                     [0, 2, 6, 8, 10], [0, 3, 5, 7, 10], [0, 3, 5, 8, 10], [0, 3, 6, 8, 10],
                     [0, 4, 6, 8, 10], [0, 2, 4, 6, 8, 10], [1], [1, 3], [1, 4], [1, 5], [1, 3, 5],
                     [1, 6], [1, 3, 6], [1, 4, 6], [1, 7], [1, 3, 7], [1, 4, 7], [1, 5, 7],
                     [1, 3, 5, 7], [1, 8], [1, 3, 8], [1, 4, 8], [1, 5, 8], [1, 6, 8], [1, 3, 5, 8],
                     [1, 3, 6, 8], [1, 4, 6, 8], [1, 9], [1, 3, 9], [1, 4, 9], [1, 5, 9], [1, 6, 9],
                     [1, 7, 9], [1, 3, 5, 9], [1, 3, 6, 9], [1, 3, 7, 9], [1, 4, 6, 9],
                     [1, 4, 7, 9], [1, 5, 7, 9], [1, 3, 5, 7, 9], [1, 10], [1, 3, 10], [1, 4, 10],
                     [1, 5, 10], [1, 6, 10], [1, 7, 10], [1, 8, 10], [1, 3, 5, 10], [1, 3, 6, 10],
                     [1, 3, 7, 10], [1, 3, 8, 10], [1, 4, 6, 10], [1, 4, 7, 10], [1, 4, 8, 10],
                     [1, 5, 7, 10], [1, 5, 8, 10], [1, 6, 8, 10], [1, 3, 5, 7, 10],
                     [1, 3, 5, 8, 10], [1, 3, 6, 8, 10], [1, 4, 6, 8, 10], [1, 11], [1, 3, 11],
                     [1, 4, 11], [1, 5, 11], [1, 6, 11], [1, 7, 11], [1, 8, 11], [1, 9, 11],
                     [1, 3, 5, 11], [1, 3, 6, 11], [1, 3, 7, 11], [1, 3, 8, 11], [1, 3, 9, 11],
                     [1, 4, 6, 11], [1, 4, 7, 11], [1, 4, 8, 11], [1, 4, 9, 11], [1, 5, 7, 11],
                     [1, 5, 8, 11], [1, 5, 9, 11], [1, 6, 8, 11], [1, 6, 9, 11], [1, 7, 9, 11],
                     [1, 3, 5, 7, 11], [1, 3, 5, 8, 11], [1, 3, 5, 9, 11], [1, 3, 6, 8, 11],
                     [1, 3, 6, 9, 11], [1, 3, 7, 9, 11], [1, 4, 6, 8, 11], [1, 4, 6, 9, 11],
                     [1, 4, 7, 9, 11], [1, 5, 7, 9, 11], [1, 3, 5, 7, 9, 11], [2], [2, 4], [2, 5],
                     [2, 6], [2, 4, 6], [2, 7], [2, 4, 7], [2, 5, 7], [2, 8], [2, 4, 8], [2, 5, 8],
                     [2, 6, 8], [2, 4, 6, 8], [2, 9], [2, 4, 9], [2, 5, 9], [2, 6, 9], [2, 7, 9],
                     [2, 4, 6, 9], [2, 4, 7, 9], [2, 5, 7, 9], [2, 10], [2, 4, 10], [2, 5, 10],
                     [2, 6, 10], [2, 7, 10], [2, 8, 10], [2, 4, 6, 10], [2, 4, 7, 10],
                     [2, 4, 8, 10], [2, 5, 7, 10], [2, 5, 8, 10], [2, 6, 8, 10], [2, 4, 6, 8, 10],
                     [2, 11], [2, 4, 11], [2, 5, 11], [2, 6, 11], [2, 7, 11], [2, 8, 11],
                     [2, 9, 11], [2, 4, 6, 11], [2, 4, 7, 11], [2, 4, 8, 11], [2, 4, 9, 11],
                     [2, 5, 7, 11], [2, 5, 8, 11], [2, 5, 9, 11], [2, 6, 8, 11], [2, 6, 9, 11],
                     [2, 7, 9, 11], [2, 4, 6, 8, 11], [2, 4, 6, 9, 11], [2, 4, 7, 9, 11],
                     [2, 5, 7, 9, 11], [3], [3, 5], [3, 6], [3, 7], [3, 5, 7], [3, 8], [3, 5, 8],
                     [3, 6, 8], [3, 9], [3, 5, 9], [3, 6, 9], [3, 7, 9], [3, 5, 7, 9], [3, 10],
                     [3, 5, 10], [3, 6, 10], [3, 7, 10], [3, 8, 10], [3, 5, 7, 10], [3, 5, 8, 10],
                     [3, 6, 8, 10], [3, 11], [3, 5, 11], [3, 6, 11], [3, 7, 11], [3, 8, 11],
                     [3, 9, 11], [3, 5, 7, 11], [3, 5, 8, 11], [3, 5, 9, 11], [3, 6, 8, 11],
                     [3, 6, 9, 11], [3, 7, 9, 11], [3, 5, 7, 9, 11], [4], [4, 6], [4, 7], [4, 8],
                     [4, 6, 8], [4, 9], [4, 6, 9], [4, 7, 9], [4, 10], [4, 6, 10], [4, 7, 10],
                     [4, 8, 10], [4, 6, 8, 10], [4, 11], [4, 6, 11], [4, 7, 11], [4, 8, 11],
                     [4, 9, 11], [4, 6, 8, 11], [4, 6, 9, 11], [4, 7, 9, 11], [5], [5, 7], [5, 8],
                     [5, 9], [5, 7, 9], [5, 10], [5, 7, 10], [5, 8, 10], [5, 11], [5, 7, 11],
                     [5, 8, 11], [5, 9, 11], [5, 7, 9, 11], [6], [6, 8], [6, 9], [6, 10],
                     [6, 8, 10], [6, 11], [6, 8, 11], [6, 9, 11], [7], [7, 9], [7, 10], [7, 11],
                     [7, 9, 11], [8], [8, 10], [8, 11], [9], [9, 11], [10], [11]]

###################################################################################

ALL_CHORDS_FULL = [[0], [0, 3], [0, 3, 5], [0, 3, 5, 8], [0, 3, 5, 9], [0, 3, 5, 10], [0, 3, 6],
                   [0, 3, 6, 9], [0, 3, 6, 10], [0, 3, 7], [0, 3, 7, 10], [0, 3, 8], [0, 3, 9],
                   [0, 3, 10], [0, 4], [0, 4, 6], [0, 4, 6, 9], [0, 4, 6, 10], [0, 4, 7],
                   [0, 4, 7, 10], [0, 4, 8], [0, 4, 9], [0, 4, 10], [0, 5], [0, 5, 8], [0, 5, 9],
                   [0, 5, 10], [0, 6], [0, 6, 9], [0, 6, 10], [0, 7], [0, 7, 10], [0, 8], [0, 9],
                   [0, 10], [1], [1, 4], [1, 4, 6], [1, 4, 6, 9], [1, 4, 6, 10], [1, 4, 6, 11],
                   [1, 4, 7], [1, 4, 7, 10], [1, 4, 7, 11], [1, 4, 8], [1, 4, 8, 11], [1, 4, 9],
                   [1, 4, 10], [1, 4, 11], [1, 5], [1, 5, 8], [1, 5, 8, 11], [1, 5, 9],
                   [1, 5, 10], [1, 5, 11], [1, 6], [1, 6, 9], [1, 6, 10], [1, 6, 11], [1, 7],
                   [1, 7, 10], [1, 7, 11], [1, 8], [1, 8, 11], [1, 9], [1, 10], [1, 11], [2],
                   [2, 5], [2, 5, 8], [2, 5, 8, 11], [2, 5, 9], [2, 5, 10], [2, 5, 11], [2, 6],
                   [2, 6, 9], [2, 6, 10], [2, 6, 11], [2, 7], [2, 7, 10], [2, 7, 11], [2, 8],
                   [2, 8, 11], [2, 9], [2, 10], [2, 11], [3], [3, 5], [3, 5, 8], [3, 5, 8, 11],
                   [3, 5, 9], [3, 5, 10], [3, 5, 11], [3, 6], [3, 6, 9], [3, 6, 10], [3, 6, 11],
                   [3, 7], [3, 7, 10], [3, 7, 11], [3, 8], [3, 8, 11], [3, 9], [3, 10], [3, 11],
                   [4], [4, 6], [4, 6, 9], [4, 6, 10], [4, 6, 11], [4, 7], [4, 7, 10], [4, 7, 11],
                   [4, 8], [4, 8, 11], [4, 9], [4, 10], [4, 11], [5], [5, 8], [5, 8, 11], [5, 9],
                   [5, 10], [5, 11], [6], [6, 9], [6, 10], [6, 11], [7], [7, 10], [7, 11], [8],
                   [8, 11], [9], [10], [11]]

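# Both tables above enumerate the "good" pitch-class sets used throughout this
# module: each chord is a sorted list of pitch classes (0-11). For example, a
# major triad in root position is present, while a chromatic cluster is not:
#
#   [0, 4, 7] in ALL_CHORDS_SORTED   # -> True
#   [0, 1, 2] in ALL_CHORDS_SORTED   # -> False
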
###################################################################################
###################################################################################

def create_files_list(datasets_paths=['./'],
                      files_exts=['.mid', '.midi', '.kar', '.MID', '.MIDI', '.KAR'],
                      randomize_files_list=True,
                      verbose=True
                      ):

    if verbose:
        print('=' * 70)
        print('Searching for files...')
        print('This may take a while on a large dataset in particular...')
        print('=' * 70)

    filez_set = defaultdict(None)

    files_exts = tuple(files_exts)

    for dataset_addr in tqdm.tqdm(datasets_paths):
        for dirpath, dirnames, filenames in os.walk(dataset_addr):
            for file in filenames:
                if file not in filez_set and file.endswith(files_exts):
                    filez_set[os.path.join(dirpath, file)] = None

    filez = list(filez_set.keys())

    if verbose:
        print('Done!')
        print('=' * 70)

    if filez:
        if randomize_files_list:

            if verbose:
                print('Randomizing file list...')

            random.shuffle(filez)

            if verbose:
                print('Done!')
                print('=' * 70)

        if verbose:
            print('Found', len(filez), 'files.')
            print('=' * 70)

    else:
        if verbose:
            print('Could not find any files...')
            print('Please check dataset dirs and files extensions...')
            print('=' * 70)

    return filez

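# Typical usage (a sketch; the path below is hypothetical):
#
#   filez = create_files_list(datasets_paths=['./Master-MIDI-Dataset/'])
#   # -> shuffled list of full paths to all matching MIDI files found
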
###################################################################################

def check_and_fix_tones_chord(tones_chord, use_full_chords=True):

    tones_chord_combs = [list(comb) for i in range(len(tones_chord), 0, -1) for comb in combinations(tones_chord, i)]

    if use_full_chords:
        CHORDS = ALL_CHORDS_FULL

    else:
        CHORDS = ALL_CHORDS_SORTED

    # Combinations are tried from largest to smallest; since every single
    # pitch class [0]..[11] is present in both tables, a match always exists.
    for c in tones_chord_combs:
        if c in CHORDS:
            checked_tones_chord = c
            break

    return sorted(checked_tones_chord)
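
# Example: a chromatic cluster is reduced to its largest sub-chord that is
# present in the table (here, under the sorted table):
#
#   check_and_fix_tones_chord([0, 1, 2], use_full_chords=False)   # -> [0, 2]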

###################################################################################

def chordify_score(score,
                   return_choridfied_score=True,
                   return_detected_score_information=False
                   ):

    if score:

        num_tracks = 1
        single_track_score = []
        score_num_ticks = 0

        if type(score[0]) == int and len(score) > 1:

            score_type = 'MIDI_PY'
            score_num_ticks = score[0]

            while num_tracks < len(score):
                for event in score[num_tracks]:
                    single_track_score.append(event)
                num_tracks += 1

        else:
            score_type = 'CUSTOM'
            single_track_score = score

        if single_track_score and single_track_score[0]:

            try:

                if type(single_track_score[0][0]) == str or single_track_score[0][0] == 'note':
                    single_track_score.sort(key = lambda x: x[1])
                    score_timings = [s[1] for s in single_track_score]
                else:
                    score_timings = [s[0] for s in single_track_score]

                is_score_time_absolute = lambda sct: all(x <= y for x, y in zip(sct, sct[1:]))

                score_timings_type = ''

                if is_score_time_absolute(score_timings):
                    score_timings_type = 'ABS'

                    chords = []
                    cho = []

                    if score_type == 'MIDI_PY':
                        pe = single_track_score[0]
                    else:
                        pe = single_track_score[0]

                    for e in single_track_score:

                        if score_type == 'MIDI_PY':
                            time = e[1]
                            ptime = pe[1]
                        else:
                            time = e[0]
                            ptime = pe[0]

                        if time == ptime:
                            cho.append(e)

                        else:
                            if len(cho) > 0:
                                chords.append(cho)
                            cho = []
                            cho.append(e)

                        pe = e

                    if len(cho) > 0:
                        chords.append(cho)

                else:
                    score_timings_type = 'REL'

                    chords = []
                    cho = []

                    for e in single_track_score:

                        if score_type == 'MIDI_PY':
                            time = e[1]
                        else:
                            time = e[0]

                        if time == 0:
                            cho.append(e)

                        else:
                            if len(cho) > 0:
                                chords.append(cho)
                            cho = []
                            cho.append(e)

                    if len(cho) > 0:
                        chords.append(cho)

                requested_data = []

                if return_detected_score_information:

                    detected_score_information = []

                    detected_score_information.append(['Score type', score_type])
                    detected_score_information.append(['Score timings type', score_timings_type])
                    detected_score_information.append(['Score tpq', score_num_ticks])
                    detected_score_information.append(['Score number of tracks', num_tracks])

                    requested_data.append(detected_score_information)

                if return_choridfied_score and return_detected_score_information:
                    requested_data.append(chords)

                if return_choridfied_score and not return_detected_score_information:
                    requested_data.extend(chords)

                return requested_data

            except Exception as e:
                print('Error!')
                print('Check score for consistency and compatibility!')
                print('Exception detected:', e)

        else:
            return None

    else:
        return None

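# A small usage sketch with a 'CUSTOM' score (plain note lists whose first
# element is an absolute start time); events sharing a start time are grouped:
#
#   score = [[0, 100, 0, 60, 90], [0, 100, 0, 64, 90], [200, 100, 0, 67, 90]]
#   chordify_score(score)
#   # -> [[[0, 100, 0, 60, 90], [0, 100, 0, 64, 90]], [[200, 100, 0, 67, 90]]]
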
###################################################################################

def augment_enhanced_score_notes(enhanced_score_notes,
                                 timings_divider=16,
                                 full_sorting=True,
                                 timings_shift=0,
                                 pitch_shift=0,
                                 ceil_timings=False,
                                 round_timings=False,
                                 legacy_timings=True,
                                 sort_drums_last=False
                                 ):

    esn = copy.deepcopy(enhanced_score_notes)

    pe = enhanced_score_notes[0]

    abs_time = max(0, int(enhanced_score_notes[0][1] / timings_divider))

    for i, e in enumerate(esn):

        dtime = (e[1] / timings_divider) - (pe[1] / timings_divider)

        if round_timings:
            dtime = round(dtime)

        else:
            if ceil_timings:
                dtime = math.ceil(dtime)

            else:
                dtime = int(dtime)

        if legacy_timings:
            abs_time = int(e[1] / timings_divider) + timings_shift

        else:
            abs_time += dtime

        e[1] = max(0, abs_time + timings_shift)

        if round_timings:
            e[2] = max(1, round(e[2] / timings_divider)) + timings_shift

        else:
            if ceil_timings:
                e[2] = max(1, math.ceil(e[2] / timings_divider)) + timings_shift
            else:
                e[2] = max(1, int(e[2] / timings_divider)) + timings_shift

        e[4] = max(1, min(127, e[4] + pitch_shift))

        pe = enhanced_score_notes[i]

    if full_sorting:

        # Sorting by patch, reverse pitch and start-time
        esn.sort(key=lambda x: x[6])
        esn.sort(key=lambda x: x[4], reverse=True)
        esn.sort(key=lambda x: x[1])

    if sort_drums_last:
        esn.sort(key=lambda x: (x[1], -x[4], x[6]) if x[6] != 128 else (x[1], x[6], -x[4]))

    return esn

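# Example (a sketch): with the default timings_divider=16, millisecond-based
# enhanced score notes ['note', start_ms, dur_ms, channel, pitch, velocity, patch]
# are quantized to 16 ms steps, with durations clamped to >= 1 and pitches
# clamped to 1..127:
#
#   augment_enhanced_score_notes([['note', 160, 330, 0, 60, 90, 0]])
#   # -> [['note', 10, 20, 0, 60, 90, 0]]
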
###################################################################################

def advanced_score_processor(raw_score,
                             patches_to_analyze=list(range(129)),
                             return_score_analysis=False,
                             return_enhanced_score=False,
                             return_enhanced_score_notes=False,
                             return_enhanced_monophonic_melody=False,
                             return_chordified_enhanced_score=False,
                             return_chordified_enhanced_score_with_lyrics=False,
                             return_score_tones_chords=False,
                             return_text_and_lyric_events=False
                             ):

    '''TMIDIX Advanced Score Processor'''

    # Score data types detection

    if raw_score and type(raw_score) == list:

        num_ticks = 0
        num_tracks = 1

        basic_single_track_score = []

        if type(raw_score[0]) != int:
            if len(raw_score[0]) < 5 and type(raw_score[0][0]) != str:
                return ['Check score for errors and compatibility!']

            else:
                basic_single_track_score = copy.deepcopy(raw_score)

        else:
            num_ticks = raw_score[0]
            while num_tracks < len(raw_score):
                for event in raw_score[num_tracks]:
                    ev = copy.deepcopy(event)
                    basic_single_track_score.append(ev)
                num_tracks += 1

        basic_single_track_score.sort(key=lambda x: x[4] if x[0] == 'note' else 128, reverse=True)
        basic_single_track_score.sort(key=lambda x: x[1])

        enhanced_single_track_score = []
        patches = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
        all_score_patches = []
        num_patch_changes = 0

        for event in basic_single_track_score:
            if event[0] == 'patch_change':
                patches[event[2]] = event[3]
                enhanced_single_track_score.append(event)
                num_patch_changes += 1

            if event[0] == 'note':
                if event[3] != 9:
                    event.extend([patches[event[3]]])
                    all_score_patches.extend([patches[event[3]]])
                else:
                    event.extend([128])
                    all_score_patches.extend([128])

                if enhanced_single_track_score:
                    if (event[1] == enhanced_single_track_score[-1][1]):
                        if ([event[3], event[4]] != enhanced_single_track_score[-1][3:5]):
                            enhanced_single_track_score.append(event)
                    else:
                        enhanced_single_track_score.append(event)

                else:
                    enhanced_single_track_score.append(event)

            if event[0] not in ['note', 'patch_change']:
                enhanced_single_track_score.append(event)

        enhanced_single_track_score.sort(key=lambda x: x[6] if x[0] == 'note' else -1)
        enhanced_single_track_score.sort(key=lambda x: x[4] if x[0] == 'note' else 128, reverse=True)
        enhanced_single_track_score.sort(key=lambda x: x[1])

        # Analysis and chordification

        cscore = []
        cescore = []
        chords_tones = []
        tones_chords = []
        all_tones = []
        all_chords_good = True
        bad_chords = []
        bad_chords_count = 0
        score_notes = []
        score_pitches = []
        score_patches = []
        num_text_events = 0
        num_lyric_events = 0
        num_other_events = 0
        text_and_lyric_events = []
        text_and_lyric_events_latin = None

        analysis = {}

        score_notes = [s for s in enhanced_single_track_score if s[0] == 'note' and s[6] in patches_to_analyze]
        score_patches = [sn[6] for sn in score_notes]

        if return_text_and_lyric_events:
            text_and_lyric_events = [e for e in enhanced_single_track_score if e[0] in ['text_event', 'lyric']]

            if text_and_lyric_events:
                text_and_lyric_events_latin = True
                for e in text_and_lyric_events:
                    try:
                        tle = str(e[2].decode())
                    except:
                        tle = str(e[2])

                    for c in tle:
                        if not 0 <= ord(c) < 128:
                            text_and_lyric_events_latin = False

        if (return_chordified_enhanced_score or return_score_analysis) and any(elem in patches_to_analyze for elem in score_patches):

            cescore = chordify_score([num_ticks, enhanced_single_track_score])

            if return_score_analysis:

                cscore = chordify_score(score_notes)

                score_pitches = [sn[4] for sn in score_notes]

                text_events = [e for e in enhanced_single_track_score if e[0] == 'text_event']
                num_text_events = len(text_events)

                lyric_events = [e for e in enhanced_single_track_score if e[0] == 'lyric']
                num_lyric_events = len(lyric_events)

                other_events = [e for e in enhanced_single_track_score if e[0] not in ['note', 'patch_change', 'text_event', 'lyric']]
                num_other_events = len(other_events)

                for c in cscore:
                    tones = sorted(set([t[4] % 12 for t in c if t[3] != 9]))

                    if tones:
                        chords_tones.append(tones)
                        all_tones.extend(tones)

                        # NOTE: the original referenced an undefined ALL_CHORDS;
                        # ALL_CHORDS_SORTED is the table defined in this module.
                        if tones not in ALL_CHORDS_SORTED:
                            all_chords_good = False
                            bad_chords.append(tones)
                            bad_chords_count += 1

                analysis['Number of ticks per quarter note'] = num_ticks
                analysis['Number of tracks'] = num_tracks
                analysis['Number of all events'] = len(enhanced_single_track_score)
                analysis['Number of patch change events'] = num_patch_changes
                analysis['Number of text events'] = num_text_events
                analysis['Number of lyric events'] = num_lyric_events
                analysis['All text and lyric events Latin'] = text_and_lyric_events_latin
                analysis['Number of other events'] = num_other_events
                analysis['Number of score notes'] = len(score_notes)
                analysis['Number of score chords'] = len(cscore)
                analysis['Score patches'] = sorted(set(score_patches))
                analysis['Score pitches'] = sorted(set(score_pitches))
                analysis['Score tones'] = sorted(set(all_tones))
                if chords_tones:
                    analysis['Shortest chord'] = sorted(min(chords_tones, key=len))
                    analysis['Longest chord'] = sorted(max(chords_tones, key=len))
                analysis['All chords good'] = all_chords_good
                analysis['Number of bad chords'] = bad_chords_count
                analysis['Bad chords'] = sorted([list(c) for c in set(tuple(bc) for bc in bad_chords)])

        else:
            analysis['Error'] = 'Provided score does not have specified patches to analyse'
            analysis['Provided patches to analyse'] = sorted(patches_to_analyze)
            analysis['Patches present in the score'] = sorted(set(all_score_patches))

        if return_enhanced_monophonic_melody:

            score_notes_copy = copy.deepcopy(score_notes)
            chordified_score_notes = chordify_score(score_notes_copy)

            melody = [c[0] for c in chordified_score_notes]

            fixed_melody = []

            for i in range(len(melody)-1):
                note = melody[i]
                nmt = melody[i+1][1]

                if note[1]+note[2] >= nmt:
                    note_dur = nmt-note[1]-1
                else:
                    note_dur = note[2]

                melody[i][2] = note_dur

                fixed_melody.append(melody[i])
            fixed_melody.append(melody[-1])

        if return_score_tones_chords:
            cscore = chordify_score(score_notes)
            for c in cscore:
                tones_chord = sorted(set([t[4] % 12 for t in c if t[3] != 9]))
                if tones_chord:
                    tones_chords.append(tones_chord)

        if return_chordified_enhanced_score_with_lyrics:
            score_with_lyrics = [e for e in enhanced_single_track_score if e[0] in ['note', 'text_event', 'lyric']]
            chordified_enhanced_score_with_lyrics = chordify_score(score_with_lyrics)

        # Returned data

        requested_data = []

        if return_score_analysis and analysis:
            requested_data.append([[k, v] for k, v in analysis.items()])

        if return_enhanced_score and enhanced_single_track_score:
            requested_data.append([num_ticks, enhanced_single_track_score])

        if return_enhanced_score_notes and score_notes:
            requested_data.append(score_notes)

        if return_enhanced_monophonic_melody and fixed_melody:
            requested_data.append(fixed_melody)

        if return_chordified_enhanced_score and cescore:
            requested_data.append(cescore)

        if return_chordified_enhanced_score_with_lyrics and chordified_enhanced_score_with_lyrics:
            requested_data.append(chordified_enhanced_score_with_lyrics)

        if return_score_tones_chords and tones_chords:
            requested_data.append(tones_chords)

        if return_text_and_lyric_events and text_and_lyric_events:
            requested_data.append(text_and_lyric_events)

        return requested_data

    else:
        return ['Check score for errors and compatibility!']

###################################################################################

def load_signatures(signatures_data, omit_drums=True):

    sigs_dicts = []

    for sig in tqdm.tqdm(signatures_data):

        if omit_drums:
            sig = [sig[0], [s for s in sig[1] if s[0] < 449]]

        sigs_dicts.append([sig[0], dict(sig[1])])

    return sigs_dicts

###################################################################################

def get_distance(sig_dict1,
                 sig_dict2,
                 penalty=10,
                 p=3
                 ):

    all_keys = set(sig_dict1.keys()) | set(sig_dict2.keys())

    total = 0.0

    for key in all_keys:

        if key in sig_dict1 and key in sig_dict2:
            a = sig_dict1.get(key, 0)
            b = sig_dict2.get(key, 0)

            if min(a, b) > 0:
                ratio = max(a, b) / min(a, b)
                diff = ratio - 1
                total += diff ** p

        else:
            diff = penalty
            total += diff ** p

    return total ** (1.0 / p)
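
# A worked example: with penalty=10 and p=3, keys present in both dicts
# contribute (max/min - 1)**3, keys present in only one contribute 10**3:
#
#   get_distance({60: 4, 200: 2}, {60: 2, 300: 1})
#   # key 60: (4/2 - 1)**3 = 1; keys 200 and 300: 1000 each
#   # -> (1 + 1000 + 1000) ** (1/3) ~= 12.6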

###################################################################################

def get_distance_np(sig_dict1, sig_dict2, penalty=10, p=3):

    keys = np.array(list(set(sig_dict1.keys()) | set(sig_dict2.keys())))
    # Build frequency arrays aligned just with these keys.
    freq1 = np.array([sig_dict1.get(k, 0) for k in keys], dtype=float)
    freq2 = np.array([sig_dict2.get(k, 0) for k in keys], dtype=float)

    mask = (freq1 > 0) & (freq2 > 0)
    diff = np.where(mask,
                    (np.maximum(freq1, freq2) / np.minimum(freq1, freq2)) - 1.0,
                    penalty)

    # The arrays are 1-D and every key is in the union by construction, so no
    # extra union mask or axis argument is needed here (cf. get_distances_np).
    sum_term = np.sum(diff ** p)

    return np.cbrt(sum_term) if p == 3 else np.power(sum_term, 1.0 / p)
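
# Note: get_distance and get_distance_np compute the same metric, so on the
# same pair of signature dictionaries they should agree up to float rounding:
#
#   float(get_distance_np({60: 4}, {60: 2}))   # -> 1.0, same as get_distance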

###################################################################################

def counter_to_vector(counter, union_keys):

    # NOTE: union_keys must be sorted (np.searchsorted relies on it) and must
    # contain every key of counter, as guaranteed by precompute_signatures below.
    vec = np.zeros(union_keys.shape, dtype=float)
    keys = np.array(list(counter.keys()))
    values = np.array(list(counter.values()), dtype=float)
    indices = np.searchsorted(union_keys, keys)
    vec[indices] = values

    return vec

###################################################################################

def precompute_signatures(signatures_dictionaries):

    all_counters = [sig[1] for sig in signatures_dictionaries]
    global_union = np.array(sorted({key for counter in all_counters for key in counter.keys()}))

    X = np.stack([counter_to_vector(sig[1], global_union) for sig in signatures_dictionaries])

    return X, global_union
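
# Typical pipeline sketch: vectorize all dataset signatures once, then score a
# target signature against every row of X in a single vectorized pass with
# get_distances_np (defined below):
#
#   sigs_dicts = load_signatures(signatures_data)
#   X, global_union = precompute_signatures(sigs_dicts)
#   dists = get_distances_np(sigs_dicts[0][1], X, global_union)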

###################################################################################

def get_distances_np(trg_signature_dictionary,
                     X,
                     global_union,
                     penalty=10,
                     p=3
                     ):

    target_vec = counter_to_vector(trg_signature_dictionary, global_union)

    mask_both = (X > 0) & (target_vec > 0)

    diff = np.where(mask_both,
                    (np.maximum(X, target_vec) / np.minimum(X, target_vec)) - 1.0,
                    penalty)

    union_mask = (X > 0) | (target_vec > 0)

    sum_term = np.sum((diff ** p) * union_mask, axis=1)

    return np.cbrt(sum_term) if p == 3 else np.power(sum_term, 1.0 / p)

###################################################################################

def get_MIDI_signature(path_to_MIDI_file,
                       transpose_factor=0,
                       omit_drums=True
                       ):

    try:

        raw_score = midi2single_track_ms_score(path_to_MIDI_file)

        escore = advanced_score_processor(raw_score, return_enhanced_score_notes=True)[0]

        escore = augment_enhanced_score_notes(escore)

        drums_offset = len(ALL_CHORDS_SORTED) + 128

        transpose_factor = max(0, min(6, transpose_factor))

        if transpose_factor > 0:

            sidx = -transpose_factor
            eidx = transpose_factor

        else:
            sidx = 0
            eidx = 1

        src_sigs = []

        for i in range(sidx, eidx):

            escore_copy = copy.deepcopy(escore)

            for e in escore_copy:
                e[4] += i

            cscore = chordify_score([1000, escore_copy])

            sig = []
            dsig = []

            for c in cscore:

                all_pitches = [e[4] if e[3] != 9 else e[4]+128 for e in c]
                chord = sorted(set(all_pitches))

                pitches = sorted([p for p in chord if p < 128], reverse=True)
                drums = [(d+drums_offset)-128 for d in chord if d > 127]

                if pitches:
                    if len(pitches) > 1:

                        tones_chord = sorted(set([p % 12 for p in pitches]))

                        # The token lookup below indexes ALL_CHORDS_SORTED, so the
                        # fix must use the same table (use_full_chords=False).
                        if tones_chord not in ALL_CHORDS_SORTED:
                            tones_chord = check_and_fix_tones_chord(tones_chord, use_full_chords=False)

                        sig_token = ALL_CHORDS_SORTED.index(tones_chord) + 128

                    elif len(pitches) == 1:
                        sig_token = pitches[0]

                    sig.append(sig_token)

                if drums:
                    dsig.extend(drums)

            if omit_drums:
                sig_p = dict.fromkeys(sig, 0)

                for item in sig:
                    sig_p[item] += 1

            else:
                sig_p = dict.fromkeys(sig+dsig, 0)

                for item in sig+dsig:
                    sig_p[item] += 1

            src_sigs.append(sig_p)

        return src_sigs

    except:
        return []

###################################################################################

def load_pickle(input_file_name, ext='.pickle', verbose=True):

    if input_file_name:

        if verbose:
            print('Tegridy Pickle File Loader')
            print('Loading the pickle file. Please wait...')

        if os.path.basename(input_file_name).endswith(ext):
            fname = input_file_name

        else:
            fname = input_file_name + ext

        with open(fname, 'rb') as pickle_file:
            content = pickle.load(pickle_file)

        if verbose:
            print('Done!')

        return content

    else:
        return None

###################################################################################

def search_and_filter(sigs_dicts,
                      X,
                      global_union,
                      monster_dir = './Monster-MIDI-Dataset/MIDIs/',
                      master_dir = './Master-MIDI-Dataset/',
                      output_dir = './Output-MIDI-Dataset/',
                      number_of_top_matches_to_copy = 30,
                      transpose_factor=6
                      ):

    transpose_factor = max(0, min(6, transpose_factor))

    if transpose_factor > 0:

        tsidx = -transpose_factor
        teidx = transpose_factor

    else:
        tsidx = 0
        teidx = 1

    master_midis = create_files_list([master_dir])

    os.makedirs(output_dir, exist_ok=True)

    for midi in master_midis:

        inp_fn = os.path.basename(midi)

        print('=' * 70)
        print('Processing MIDI file:', inp_fn)
        print('=' * 70)

        trg_sigs = get_MIDI_signature(midi, transpose_factor=transpose_factor)

        tv = list(range(tsidx, teidx))

        seen = []

        for i in tqdm.tqdm(range(len(trg_sigs))):

            dists = get_distances_np(trg_sigs[i], X, global_union)

            sorted_indices = np.argsort(dists).tolist()

            out_dir = os.path.splitext(inp_fn)[0]

            # Paths are joined with os.path.join so the dirs do not need
            # trailing slashes (the original mixed both concatenation styles).
            os.makedirs(os.path.join(output_dir, out_dir), exist_ok=True)

            for _, idx in enumerate(sorted_indices[:number_of_top_matches_to_copy]):

                fn = sigs_dicts[idx][0]
                dist = dists[idx]

                new_fn = os.path.join(output_dir, out_dir, str(dist)+'_'+str(tv[i])+'_'+fn+'.mid')

                if fn not in seen:

                    src_fn = os.path.join(monster_dir, fn[0], fn+'.mid')

                    if os.path.exists(src_fn):
                        shutil.copy2(src_fn, new_fn)
                        seen.append(fn)

    print('=' * 70)
    print('Done!')
    print('=' * 70)
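
# End-to-end usage sketch (directory layout as in the defaults above;
# 'signatures_data_file' is a hypothetical path to the signatures pickle):
#
#   signatures_data = load_pickle('signatures_data_file')
#   sigs_dicts = load_signatures(signatures_data)
#   X, global_union = precompute_signatures(sigs_dicts)
#   search_and_filter(sigs_dicts, X, global_union)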

###################################################################################

print('Module is loaded!')
print('Enjoy! :)')
print('=' * 70)

###################################################################################
# This is the end of the monster_search_and_filter Python module
###################################################################################