scrape_with_controller.py
from time import sleep
from typing import List
import numpy as np
from tdw.controller import Controller
from tdw.tdw_utils import TDWUtils
from tdw.add_ons.third_person_camera import ThirdPersonCamera
from tdw.add_ons.resonance_audio_initializer import ResonanceAudioInitializer
from tdw.add_ons.py_impact import PyImpact
from tdw.audio_utils import AudioUtils
from tdw.physics_audio.scrape_material import ScrapeMaterial
from tdw.backend.paths import EXAMPLE_CONTROLLER_OUTPUT_PATH
"""
Generate scrape sounds with PyImpact without using physics data and play the audio in a circle around the avatar listener.
"""
c = Controller()
# Add a camera and initialize audio.
y = 2
camera = ThirdPersonCamera(avatar_id="a",
                           position={"x": 0, "y": y, "z": 0})
resonance_audio_floor = "metal"
resonance_audio_wall = "brick"
resonance_audio_ceiling = "acousticTile"
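# Initialize Resonance Audio; the surface materials control how sound reverberates in the room.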
audio = ResonanceAudioInitializer(avatar_id="a",
                                  floor=resonance_audio_floor,
                                  front_wall=resonance_audio_wall,
                                  back_wall=resonance_audio_wall,
                                  left_wall=resonance_audio_wall,
                                  right_wall=resonance_audio_wall,
                                  ceiling=resonance_audio_ceiling)
c.add_ons.extend([camera, audio])
# Initialize the scene.
c.communicate(TDWUtils.create_empty_room(12, 12))
# Initialize PyImpact but DON'T add it as an add-on.
py_impact_floor = ResonanceAudioInitializer.AUDIO_MATERIALS[resonance_audio_floor]
impact_sound_floor = py_impact_floor.name + "_4"
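# (The "_4" suffix appears to be PyImpact's material size-bucket naming; impact_sound_floor isn't used again in this script.)
# resonance_audio=True tells PyImpact to generate audio for Resonance Audio rather than Unity's built-in audio.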
py_impact = PyImpact(initial_amp=0.9, floor=py_impact_floor, resonance_audio=True, rng=np.random.RandomState(0))
# Generate contact normals and set the collision velocity.
contact_normals: List[np.ndarray] = list()
for i in range(3):
    contact_normals.append(np.array([0, 1, 0]))
velocity = np.array([1.5, 0, 0])
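# Three upward-facing contact normals and a constant scrape velocity along the x axis are reused for every sound.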
path = EXAMPLE_CONTROLLER_OUTPUT_PATH.joinpath("scrape_with_controller/audio.wav")
print(f"Audio will be saved to: {path}")
if not path.parent.exists():
    path.parent.mkdir(parents=True)
AudioUtils.start(output_path=path)
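# AudioUtils records the audio output to the .wav file until AudioUtils.stop() is called
# (it relies on an external audio recorder such as fmedia being installed).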
# Add sounds in a circle around the avatar.
distance = 1.5
theta = 0
d_theta = 15
contact_radius = 0.0625
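# Place a sound every 15 degrees on a circle of radius 1.5 meters around the listener,
# with three contact points clustered within a small radius at each position.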
while theta < 360:
    # Get the position of the sound.
    rad = np.radians(theta)
    x = np.cos(rad) * distance
    z = np.sin(rad) * distance
    # Generate contact points around the sound's position.
    contact_points: List[np.ndarray] = list()
    contact_angle = 0
    for i in range(3):
        r = np.radians(contact_angle)
        contact_x = np.cos(r) * contact_radius + x
        contact_z = np.sin(r) * contact_radius + z
        contact_points.append(np.array([contact_x, y, contact_z]))
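    # The "primary" parameters below describe a hypothetical scraping object and the "secondary"
    # parameters the surface it scrapes along; neither refers to an actual object in this empty scene.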
    # Get a sound.
    for i in range(5):
        c.communicate(py_impact.get_scrape_sound_command(velocity=velocity,
                                                         contact_points=contact_points,
                                                         contact_normals=contact_normals,
                                                         primary_id=0,
                                                         primary_material="metal_1",
                                                         primary_amp=0.4,
                                                         primary_mass=4,
                                                         secondary_id=None,
                                                         secondary_material="ceramic_4",
                                                         secondary_amp=0.5,
                                                         secondary_mass=100,
                                                         primary_resonance=0.4,
                                                         secondary_resonance=0.2,
                                                         scrape_material=ScrapeMaterial.ceramic))
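    # Reset PyImpact so that cached audio state doesn't carry over into the next sound.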
    py_impact.reset()
    sleep(0.15)
    theta += d_theta
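# Brief pause so the tail of the last sound is captured before stopping the recorder.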
sleep(0.15)
AudioUtils.stop()
c.communicate({"$type": "terminate"})