---
library_name: transformers.js
tags:
- pose-estimation
license: apache-2.0
---
[RTMO](https://github.com/open-mmlab/mmpose/tree/main/projects/rtmo) with ONNX weights to be compatible with Transformers.js.
## Usage (Transformers.js)
If you haven't already, you can install the [Transformers.js](https://huggingface.co/docs/transformers.js) JavaScript library from [NPM](https://www.npmjs.com/package/@xenova/transformers) using:
```bash
npm i @xenova/transformers
```
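Alternatively, you can use the library in the browser without a bundler by importing it from a CDN inside a `<script type="module">` tag (a minimal sketch; pinning an explicit package version is recommended in practice):
```js
// Browser alternative (sketch): import Transformers.js directly from a CDN.
// This import goes inside a <script type="module"> block; pin a specific
// package version in production rather than relying on the latest release.
import { AutoModel, AutoProcessor, RawImage } from 'https://cdn.jsdelivr.net/npm/@xenova/transformers';
```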
**Example:** Perform pose-estimation w/ `Xenova/RTMO-t`.
```js
import { AutoModel, AutoProcessor, RawImage } from '@xenova/transformers';

// Load model and processor
const model_id = 'Xenova/RTMO-t';
const model = await AutoModel.from_pretrained(model_id);
const processor = await AutoProcessor.from_pretrained(model_id);

// Read image and run processor
const url = 'https://huggingface.co/datasets/Xenova/transformers.js-docs/resolve/main/football-match.jpg';
const image = await RawImage.read(url);
const { pixel_values, original_sizes, reshaped_input_sizes } = await processor(image);

// Predict bounding boxes and keypoints
const { dets, keypoints } = await model({ input: pixel_values });

// Select the first image
const predicted_boxes = dets.tolist()[0];
const predicted_points = keypoints.tolist()[0];
const [height, width] = original_sizes[0];
const [resized_height, resized_width] = reshaped_input_sizes[0];

// Compute scale values
const xScale = width / resized_width;
const yScale = height / resized_height;

// Define thresholds
const point_threshold = 0.3;
const box_threshold = 0.3;

// Display results
for (let i = 0; i < predicted_boxes.length; ++i) {
    const [xmin, ymin, xmax, ymax, box_score] = predicted_boxes[i];
    if (box_score < box_threshold) continue;
    const x1 = (xmin * xScale).toFixed(2);
    const y1 = (ymin * yScale).toFixed(2);
    const x2 = (xmax * xScale).toFixed(2);
    const y2 = (ymax * yScale).toFixed(2);
    console.log(`Found person at [${x1}, ${y1}, ${x2}, ${y2}] with score ${box_score.toFixed(3)}`);

    const points = predicted_points[i]; // of shape [17, 3]
    for (let id = 0; id < points.length; ++id) {
        const label = model.config.id2label[id];
        const [x, y, point_score] = points[id];
        if (point_score < point_threshold) continue;
        console.log(`  - ${label}: (${(x * xScale).toFixed(2)}, ${(y * yScale).toFixed(2)}) with score ${point_score.toFixed(3)}`);
    }
}
```
<details>
<summary>See example output</summary>

```
Found person at [411.10, 63.87, 647.68, 505.40] with score 0.986
- nose: (526.09, 119.83) with score 0.874
- left_eye: (539.01, 110.39) with score 0.696
- right_eye: (512.50, 111.08) with score 0.662
- left_shoulder: (563.59, 171.10) with score 0.999
- right_shoulder: (467.38, 160.82) with score 0.999
- left_elbow: (572.72, 240.61) with score 0.999
- right_elbow: (437.86, 218.20) with score 0.998
- left_wrist: (603.74, 303.53) with score 0.995
- right_wrist: (506.01, 218.68) with score 0.992
- left_hip: (536.00, 306.25) with score 1.000
- right_hip: (472.79, 311.69) with score 0.999
- left_knee: (580.82, 366.38) with score 0.996
- right_knee: (500.25, 449.72) with score 0.954
- left_ankle: (572.21, 449.52) with score 0.993
- right_ankle: (541.37, 436.71) with score 0.916
Found person at [93.58, 19.64, 492.62, 522.45] with score 0.909
- left_shoulder: (233.76, 109.57) with score 0.971
- right_shoulder: (229.56, 100.34) with score 0.950
- left_elbow: (317.31, 162.73) with score 0.950
- right_elbow: (229.98, 179.31) with score 0.934
- left_wrist: (385.59, 219.03) with score 0.870
- right_wrist: (161.31, 230.74) with score 0.952
- left_hip: (351.23, 243.42) with score 0.998
- right_hip: (361.94, 240.70) with score 0.999
- left_knee: (297.77, 382.00) with score 0.998
- right_knee: (306.07, 393.59) with score 1.000
- left_ankle: (413.48, 354.16) with score 1.000
- right_ankle: (445.30, 488.11) with score 0.999
Found person at [-1.46, 50.68, 160.66, 371.74] with score 0.780
- nose: (80.17, 81.16) with score 0.570
- left_eye: (85.17, 75.45) with score 0.383
- right_eye: (70.20, 77.09) with score 0.382
- left_shoulder: (121.30, 114.98) with score 0.981
- right_shoulder: (46.56, 114.41) with score 0.981
- left_elbow: (144.09, 163.76) with score 0.777
- right_elbow: (29.69, 159.24) with score 0.886
- left_wrist: (142.31, 205.64) with score 0.725
- right_wrist: (6.24, 199.62) with score 0.876
- left_hip: (108.07, 208.90) with score 0.992
- right_hip: (64.72, 212.01) with score 0.996
- left_knee: (115.26, 276.52) with score 0.998
- right_knee: (65.09, 283.25) with score 0.998
- left_ankle: (126.09, 340.42) with score 0.991
- right_ankle: (63.88, 348.88) with score 0.977
Found person at [526.35, 36.25, 650.42, 280.90] with score 0.328
- nose: (554.06, 71.87) with score 0.901
- left_eye: (562.10, 66.30) with score 0.928
- right_eye: (546.65, 66.36) with score 0.746
- left_ear: (575.98, 68.17) with score 0.658
- left_shoulder: (588.04, 102.61) with score 0.999
- right_shoulder: (526.00, 102.94) with score 0.704
- left_elbow: (618.11, 149.18) with score 0.984
- left_wrist: (630.77, 189.42) with score 0.961
- left_hip: (578.74, 181.42) with score 0.966
- right_hip: (530.33, 176.46) with score 0.698
- left_knee: (568.74, 233.01) with score 0.958
- right_knee: (542.44, 243.87) with score 0.687
- left_ankle: (585.17, 284.79) with score 0.838
- right_ankle: (550.07, 292.19) with score 0.435
```
</details>
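
The model returns bounding boxes and keypoints in the coordinate system of the resized input, which is why the example rescales them with `xScale` and `yScale`. If you want the predictions as data rather than console output, a small helper like the hypothetical `toPoses` below (a sketch built on the variables from the example above, not part of the library) collects them into plain objects:
```js
// Hypothetical helper (not part of Transformers.js): convert raw RTMO outputs
// into an array of pose objects with coordinates mapped back to the original image.
function toPoses(boxes, points, id2label, xScale, yScale, box_threshold = 0.3, point_threshold = 0.3) {
    const poses = [];
    for (let i = 0; i < boxes.length; ++i) {
        const [xmin, ymin, xmax, ymax, box_score] = boxes[i];
        if (box_score < box_threshold) continue;
        poses.push({
            score: box_score,
            box: [xmin * xScale, ymin * yScale, xmax * xScale, ymax * yScale],
            keypoints: points[i]
                .map(([x, y, score], id) => ({ label: id2label[id], x: x * xScale, y: y * yScale, score }))
                .filter((kp) => kp.score >= point_threshold),
        });
    }
    return poses;
}

// Example usage with the variables from the snippet above:
const poses = toPoses(predicted_boxes, predicted_points, model.config.id2label, xScale, yScale);
console.log(JSON.stringify(poses, null, 2));
```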