Mirror of https://github.com/Orama-Interactive/Pixelorama.git (synced 2025-01-18 09:09:47 +00:00)
Implement audio layers (#1149)
* Initial work on audio layers
* Load ogg audio files
* Fix playback position
* Support mp3 files
* Play audio at the appropriate position when the animation runs, and stop when the pause button is pressed
* Change audio cel textures for the cels where audio is playing
* Fix audio not playing at the appropriate position
* Don't play audio if the layer is invisible
* Set the audio layer names to be the imported audio file names
* Import audio from videos
* Export videos with audio. Only works with mp3 for now
* Remove support for ogg audio files as they cannot be saved, at least until I find a way to save them. Wav files will be supported with Godot 4.4
* Fix adding/removing in-between frames breaking the visual indication of audio cels
* Minor code improvements
* Export audio in videos with custom delay
* Support frame delay
* Change the frame at which the audio plays
* Fix crashes when the audio layer has no track
* Remove unneeded cel properties for audio cels
* Pxo loading/saving
* Load audio files from the audio layer properties
* Change the audio driver to Dummy from the Preferences for performance reasons
* Clone audio layers, disable layer merge and FX buttons when an audio layer is selected
* Easily change the playback frame of an audio layer from the right click menu of cel buttons
* Update Translations.pot
* Some code improvements and documentation
* Stop audio from playing when looping, if the audio does not start at the first frame
* Update audio cel buttons when changing the audio of the layer
* Mute audio layer when hiding it mid-play
* Only play the portion of the sound that corresponds to the specific frame when the animation is not running. If it is running, play the sound properly
* Some code changes to allow for potential negative frame placement for audio. This would allow audio to be placed in negative frames, which essentially means that audio would start before the first frame. This is not yet supported, however, because I don't know how to make it work with FFMPEG
Parent: 6100bdc8df
Commit: 18e9e2ec56
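For context on the export approach described above: each audio layer is shifted by its playback position using FFmpeg's adelay filter, and the delayed inputs are then merged into one track with amix before being muxed into the video. A minimal GDScript sketch of how such a -filter_complex string can be assembled (the build_audio_filter name and the example delays are illustrative, not part of the codebase):

# Sketch only: build an FFmpeg -filter_complex string that delays each audio
# input by its playback position in milliseconds and mixes the results.
func build_audio_filter(delays_ms: Array[int]) -> String:
    var filter := ""
    for i in delays_ms.size():
        # [n]adelay=delay_in_ms:all=1[na] delays every channel of input n.
        filter += "[%s]adelay=%s:all=1[%sa];" % [i, delays_ms[i], i]
    for i in delays_ms.size():
        filter += "[%sa]" % i
    # amix merges all delayed inputs into a single stream labelled [a].
    return filter + "amix=inputs=%s[a]" % delays_ms.size()

# Example: two layers, the second starting 1.5 seconds into the animation.
# build_audio_filter([0, 1500]) returns:
# "[0]adelay=0:all=1[0a];[1]adelay=1500:all=1[1a];[0a][1a]amix=inputs=2[a]"

The resulting string is passed to ffmpeg via -filter_complex together with one -i per audio file, and the mixed [a] stream is written to a temporary audio.mp3 that is then muxed into the exported video.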
@@ -1773,6 +1773,10 @@ msgstr ""

msgid "If enabled, the application window can become transparent. This affects performance, so keep it off if you don't need it."
msgstr ""

#. An option found in the preferences, under the Performance section.
msgid "Use dummy audio driver"
msgstr ""

#. Found in the Preferences, under Drivers. Specifies the renderer/video driver being used.
msgid "Renderer:"
msgstr ""

@@ -2203,6 +2207,10 @@ msgstr ""

msgid "Unlink Cels"
msgstr ""

#. An option found in the right click menu of an audio cel. If selected, the audio of the audio layer will start playing from this frame.
msgid "Play audio here"
msgstr ""

msgid "Properties"
msgstr ""

@@ -2243,6 +2251,9 @@ msgstr ""

msgid "Tilemap"
msgstr ""

msgid "Audio"
msgstr ""

msgid "Layers"
msgstr ""

@@ -2275,6 +2286,11 @@ msgstr ""

msgid "Add Tilemap Layer"
msgstr ""

#. One of the options of the create new layer button.
#: src/UI/Timeline/AnimationTimeline.tscn
msgid "Add Audio Layer"
msgstr ""

#: src/UI/Timeline/AnimationTimeline.tscn
msgid "Remove current layer"
msgstr ""

@@ -2405,6 +2421,17 @@ msgstr ""

msgid "Expand/collapse group"
msgstr ""

#. Refers to the audio file of an audio layer.
msgid "Audio file:"
msgstr ""

msgid "Load file"
msgstr ""

#. An option in the audio layer properties, allows users to play the audio starting from a specific frame.
msgid "Play at frame:"
msgstr ""

msgid "Palette"
msgstr ""
assets/graphics/misc/musical_note.png (new binary file, 192 B)

assets/graphics/misc/musical_note.png.import (new file, 34 lines)
@@ -0,0 +1,34 @@
[remap]

importer="texture"
type="CompressedTexture2D"
uid="uid://dfjd72smxp6ma"
path="res://.godot/imported/musical_note.png-f1be7cc6341733e6ffe2fa5b650b80c2.ctex"
metadata={
"vram_texture": false
}

[deps]

source_file="res://assets/graphics/misc/musical_note.png"
dest_files=["res://.godot/imported/musical_note.png-f1be7cc6341733e6ffe2fa5b650b80c2.ctex"]

[params]

compress/mode=0
compress/high_quality=false
compress/lossy_quality=0.7
compress/hdr_compression=1
compress/normal_map=0
compress/channel_pack=0
mipmaps/generate=false
mipmaps/limit=-1
roughness/mode=0
roughness/src_normal=""
process/fix_alpha_border=true
process/premult_alpha=false
process/normal_map_invert_y=false
process/hdr_as_srgb=false
process/hdr_clamp_exposure=false
process/size_limit=0
detect_3d/compress_to=1
@@ -27,10 +27,6 @@ config/windows_native_icon="res://assets/graphics/icons/icon.ico"
config/ExtensionsAPI_Version=5
config/Pxo_Version=4

[audio]

driver/driver="Dummy"

[autoload]

Global="*res://src/Autoload/Global.gd"
@@ -282,7 +282,7 @@ func process_animation(project := Global.current_project) -> void:
for cel in frame.cels:
var image := Image.new()
image.copy_from(cel.get_image())
var duration := frame.duration * (1.0 / project.fps)
var duration := frame.get_duration_in_seconds(project.fps)
processed_images.append(
ProcessedImage.new(image, project.frames.find(frame), duration)
)

@@ -298,7 +298,7 @@ func process_animation(project := Global.current_project) -> void:
image.copy_from(crop)
if trim_images:
image = image.get_region(image.get_used_rect())
var duration := frame.duration * (1.0 / project.fps)
var duration := frame.get_duration_in_seconds(project.fps)
processed_images.append(ProcessedImage.new(image, project.frames.find(frame), duration))

@@ -427,7 +427,7 @@ func export_processed_images(
if is_single_file_format(project):
if is_using_ffmpeg(project.file_format):
var video_exported := export_video(export_paths)
var video_exported := export_video(export_paths, project)
if not video_exported:
Global.popup_error(
tr("Video failed to export. Ensure that FFMPEG is installed correctly.")

@@ -505,8 +505,9 @@ func export_processed_images(

## Uses FFMPEG to export a video
func export_video(export_paths: PackedStringArray) -> bool:
func export_video(export_paths: PackedStringArray, project: Project) -> bool:
DirAccess.make_dir_absolute(TEMP_PATH)
var video_duration := 0
var temp_path_real := ProjectSettings.globalize_path(TEMP_PATH)
var input_file_path := temp_path_real.path_join("input.txt")
var input_file := FileAccess.open(input_file_path, FileAccess.WRITE)

@@ -516,25 +517,80 @@ func export_video(export_paths: PackedStringArray) -> bool:
processed_images[i].image.save_png(temp_file_path)
input_file.store_line("file '" + temp_file_name + "'")
input_file.store_line("duration %s" % processed_images[i].duration)
video_duration += processed_images[i].duration
input_file.close()

# ffmpeg -y -f concat -i input.txt output_path
var ffmpeg_execute: PackedStringArray = [
"-y", "-f", "concat", "-i", input_file_path, export_paths[0]
]
var output := []
var success := OS.execute(Global.ffmpeg_path, ffmpeg_execute, output, true)
print(output)
var temp_dir := DirAccess.open(TEMP_PATH)
for file in temp_dir.get_files():
temp_dir.remove(file)
DirAccess.remove_absolute(TEMP_PATH)
var success := OS.execute(Global.ffmpeg_path, ffmpeg_execute, [], true)
if success < 0 or success > 1:
var fail_text := """Video failed to export. Make sure you have FFMPEG installed
and have set the correct path in the preferences."""
Global.popup_error(tr(fail_text))
_clear_temp_folder()
return false
# Find audio layers
var ffmpeg_combine_audio: PackedStringArray = ["-y"]
var audio_layer_count := 0
var max_audio_duration := 0
var adelay_string := ""
for layer in project.get_all_audio_layers():
if layer.audio is AudioStreamMP3:
var temp_file_name := str(audio_layer_count + 1).pad_zeros(number_of_digits) + ".mp3"
var temp_file_path := temp_path_real.path_join(temp_file_name)
var temp_audio_file := FileAccess.open(temp_file_path, FileAccess.WRITE)
temp_audio_file.store_buffer(layer.audio.data)
ffmpeg_combine_audio.append("-i")
ffmpeg_combine_audio.append(temp_file_path)
var delay := floori(layer.playback_position * 1000)
# [n]adelay=delay_in_ms:all=1[na]
adelay_string += (
"[%s]adelay=%s:all=1[%sa];" % [audio_layer_count, delay, audio_layer_count]
)
audio_layer_count += 1
if layer.get_audio_length() >= max_audio_duration:
max_audio_duration = layer.get_audio_length()
if audio_layer_count > 0:
# If we have audio layers, merge them all into one file.
for i in audio_layer_count:
adelay_string += "[%sa]" % i
var amix_inputs_string := "amix=inputs=%s[a]" % audio_layer_count
var final_filter_string := adelay_string + amix_inputs_string
var audio_file_path := temp_path_real.path_join("audio.mp3")
ffmpeg_combine_audio.append_array(
PackedStringArray(
["-filter_complex", final_filter_string, "-map", '"[a]"', audio_file_path]
)
)
# ffmpeg -i input1 -i input2 ... -i inputn -filter_complex amix=inputs=n output_path
var combined_audio_success := OS.execute(Global.ffmpeg_path, ffmpeg_combine_audio, [], true)
if combined_audio_success == 0 or combined_audio_success == 1:
var copied_video := temp_path_real.path_join("video." + export_paths[0].get_extension())
# Then mix the audio file with the video.
DirAccess.copy_absolute(export_paths[0], copied_video)
# ffmpeg -y -i video_file -i input_audio -c:v copy -map 0:v:0 -map 1:a:0 video_file
var ffmpeg_final_video: PackedStringArray = [
"-y", "-i", copied_video, "-i", audio_file_path
]
if max_audio_duration > video_duration:
ffmpeg_final_video.append("-shortest")
ffmpeg_final_video.append_array(
["-c:v", "copy", "-map", "0:v:0", "-map", "1:a:0", export_paths[0]]
)
OS.execute(Global.ffmpeg_path, ffmpeg_final_video, [], true)
_clear_temp_folder()
return true


func _clear_temp_folder() -> void:
var temp_dir := DirAccess.open(TEMP_PATH)
for file in temp_dir.get_files():
temp_dir.remove(file)
DirAccess.remove_absolute(TEMP_PATH)


func export_animated(args: Dictionary) -> void:
var project: Project = args["project"]
var exporter: AImgIOBaseExporter = args["exporter"]
@@ -631,7 +631,7 @@ class ProjectAPI:

## Returns the current cel.
## Cel type can be checked using function [method get_class_name] inside the cel
## type can be GroupCel, PixelCel, Cel3D, or BaseCel.
## type can be GroupCel, PixelCel, Cel3D, CelTileMap, AudioCel or BaseCel.
func get_current_cel() -> BaseCel:
return current_project.get_current_cel()
@@ -14,7 +14,7 @@ signal cel_switched ## Emitted whenever you select a different cel.
signal project_data_changed(project: Project) ## Emitted when project data is modified.
signal font_loaded ## Emitted when a new font has been loaded, or an old one gets unloaded.

enum LayerTypes { PIXEL, GROUP, THREE_D, TILEMAP }
enum LayerTypes { PIXEL, GROUP, THREE_D, TILEMAP, AUDIO }
enum GridTypes { CARTESIAN, ISOMETRIC, ALL }
## Used to tell whether a color is being taken from the current theme,
## or if it is a custom color.

@@ -490,6 +490,11 @@ var window_transparency := false:
return
window_transparency = value
_save_to_override_file()
var dummy_audio_driver := false:
set(value):
if value != dummy_audio_driver:
dummy_audio_driver = value
_save_to_override_file()

## Found in Preferences. The time (in minutes) after which backup is created (if enabled).
var autosave_interval := 1.0:

@@ -726,6 +731,7 @@ func _init() -> void:
window_transparency = ProjectSettings.get_setting(
"display/window/per_pixel_transparency/allowed"
)
dummy_audio_driver = ProjectSettings.get_setting("audio/driver/driver") == "Dummy"


func _ready() -> void:

@@ -1187,3 +1193,6 @@ func _save_to_override_file() -> void:
file.store_line("[display]\n")
file.store_line("window/subwindows/embed_subwindows=%s" % single_window_mode)
file.store_line("window/per_pixel_transparency/allowed=%s" % window_transparency)
if dummy_audio_driver:
file.store_line("[audio]\n")
file.store_line('driver/driver="Dummy"')
@@ -45,6 +45,8 @@ func handle_loading_file(file: String) -> void:
return
var file_name: String = file.get_file().get_basename()
Global.control.find_child("ShaderEffect").change_shader(shader, file_name)
elif file_ext == "mp3": # Audio file
open_audio_file(file)
else: # Image files
# Attempt to load as APNG.

@@ -185,8 +187,8 @@ func handle_loading_video(file: String) -> bool:
project_size.x = temp_image.get_width()
if temp_image.get_height() > project_size.y:
project_size.y = temp_image.get_height()
DirAccess.remove_absolute(Export.TEMP_PATH)
if images_to_import.size() == 0 or project_size == Vector2i.ZERO:
DirAccess.remove_absolute(Export.TEMP_PATH)
return false # We didn't find any images, return
# If we found images, create a new project out of them
var new_project := Project.new([], file.get_basename().get_file(), project_size)

@@ -196,6 +198,14 @@ func handle_loading_video(file: String) -> bool:
Global.projects.append(new_project)
Global.tabs.current_tab = Global.tabs.get_tab_count() - 1
Global.canvas.camera_zoom()
var output_audio_file := temp_path_real.path_join("audio.mp3")
# ffmpeg -y -i input_file -vn audio.mp3
var ffmpeg_execute_audio: PackedStringArray = ["-y", "-i", file, "-vn", output_audio_file]
OS.execute(Global.ffmpeg_path, ffmpeg_execute_audio, [], true)
if FileAccess.file_exists(output_audio_file):
open_audio_file(output_audio_file)
temp_dir.remove("audio.mp3")
DirAccess.remove_absolute(Export.TEMP_PATH)
return true

@@ -438,6 +448,14 @@ func save_pxo_file(
zip_packer.start_file(tileset_path.path_join(str(j)))
zip_packer.write_file(tile.image.get_data())
zip_packer.close_file()
var audio_layers := project.get_all_audio_layers()
for i in audio_layers.size():
var layer := audio_layers[i]
var audio_path := "audio/%s" % i
if layer.audio is AudioStreamMP3:
zip_packer.start_file(audio_path)
zip_packer.write_file(layer.audio.data)
zip_packer.close_file()
zip_packer.close()

if temp_path != path:

@@ -902,6 +920,23 @@ func set_new_imported_tab(project: Project, path: String) -> void:
Global.tabs.delete_tab(prev_project_pos)


func open_audio_file(path: String) -> void:
var audio_stream: AudioStream
var file := FileAccess.open(path, FileAccess.READ)
audio_stream = AudioStreamMP3.new()
audio_stream.data = file.get_buffer(file.get_length())
if not is_instance_valid(audio_stream):
return
var project := Global.current_project
for layer in project.layers:
if layer is AudioLayer and not is_instance_valid(layer.audio):
layer.audio = audio_stream
return
var new_layer := AudioLayer.new(project, path.get_basename().get_file())
new_layer.audio = audio_stream
Global.animation_timeline.add_layer(new_layer, project)


func update_autosave() -> void:
if not is_instance_valid(autosave_timer):
return
@@ -800,7 +800,10 @@ func _cel_switched() -> void:
var layer: BaseLayer = Global.current_project.layers[Global.current_project.current_layer]
var layer_type := layer.get_layer_type()
# Do not make any changes when its the same type of layer, or a group layer
if layer_type == _curr_layer_type or layer_type == Global.LayerTypes.GROUP:
if (
layer_type == _curr_layer_type
or layer_type in [Global.LayerTypes.GROUP, Global.LayerTypes.AUDIO]
):
return
_show_relevant_tools(layer_type)
src/Classes/Cels/AudioCel.gd (new file, 18 lines)

@@ -0,0 +1,18 @@
class_name AudioCel
extends BaseCel
## A class for the properties of cels in AudioLayers.
## The term "cel" comes from "celluloid" (https://en.wikipedia.org/wiki/Cel).


func _init(_opacity := 1.0) -> void:
opacity = _opacity
image_texture = ImageTexture.new()


func get_image() -> Image:
var image := Global.current_project.new_empty_image()
return image


func get_class_name() -> String:
return "AudioCel"
@@ -11,3 +11,26 @@ var user_data := "" ## User defined data, set in the frame properties.
func _init(_cels: Array[BaseCel] = [], _duration := 1.0) -> void:
cels = _cels
duration = _duration


func get_duration_in_seconds(fps: float) -> float:
return duration * (1.0 / fps)


func position_in_seconds(project: Project, start_from := 0) -> float:
var pos := 0.0
var index := project.frames.find(self)
if index > start_from:
for i in range(start_from, index):
if i >= 0:
var frame := project.frames[i]
pos += frame.get_duration_in_seconds(project.fps)
else:
pos += 1.0 / project.fps
else:
if start_from >= project.frames.size():
return -1.0
for i in range(start_from, index, -1):
var frame := project.frames[i]
pos -= frame.get_duration_in_seconds(project.fps)
return pos
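To make the time bookkeeping above concrete, a short usage sketch (the frame index and 6 fps value are illustrative; with the default 1.0 frame duration at 6 fps, each frame lasts 1/6 of a second):

# Sketch only: where does frame 3 fall relative to the animation start and
# relative to an audio layer that begins playing at frame 5?
func _frame_position_example() -> void:
    var project := Global.current_project
    var frame := project.frames[3]
    var from_start := frame.position_in_seconds(project)  # 3 * (1.0 / 6) = 0.5 s
    # Negative result: frame 3 lies before an audio layer that starts at frame 5.
    var from_frame_5 := frame.position_in_seconds(project, 5)
    print(from_start, " ", from_frame_5)

This is the same calculation the cel and layer buttons later in the diff use to decide whether audio is audible on a given frame.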
src/Classes/Layers/AudioLayer.gd (new file, 74 lines)

@@ -0,0 +1,74 @@
class_name AudioLayer
extends BaseLayer
## A unique type of layer which acts as an audio track for the timeline.
## Each audio layer has one audio stream, and its starting position can be
## in any point during the animation.

signal audio_changed
signal playback_frame_changed

var audio: AudioStream: ## The audio stream of the layer.
set(value):
audio = value
audio_changed.emit()
var playback_position := 0.0: ## The time in seconds where the audio stream starts playing.
get():
if playback_frame >= 0:
var frame := project.frames[playback_frame]
return frame.position_in_seconds(project)
var pos := 0.0
for i in absi(playback_frame):
pos -= 1.0 / project.fps
return pos
var playback_frame := 0: ## The frame where the audio stream starts playing.
set(value):
playback_frame = value
playback_frame_changed.emit()


func _init(_project: Project, _name := "") -> void:
project = _project
name = _name


## Returns the length of the audio stream.
func get_audio_length() -> float:
if is_instance_valid(audio):
return audio.get_length()
else:
return -1.0


## Returns the class name of the audio stream. E.g. "AudioStreamMP3".
func get_audio_type() -> String:
if not is_instance_valid(audio):
return ""
return audio.get_class()


# Overridden Methods:
func serialize() -> Dictionary:
var data := {
"name": name,
"type": get_layer_type(),
"playback_frame": playback_frame,
"audio_type": get_audio_type()
}
return data


func deserialize(dict: Dictionary) -> void:
super.deserialize(dict)
playback_frame = dict.get("playback_frame", playback_frame)


func get_layer_type() -> int:
return Global.LayerTypes.AUDIO


func new_empty_cel() -> AudioCel:
return AudioCel.new()


func set_name_to_default(number: int) -> void:
name = tr("Audio") + " %s" % number
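A brief usage sketch of the playback properties above (values illustrative; as the commit message notes, negative frame placement is not yet supported in video export):

# Sketch only: moving the playback frame moves the derived playback position.
func _audio_layer_example() -> void:
    var project := Global.current_project
    var layer := AudioLayer.new(project, "Music")
    layer.playback_frame = 2  # start the stream on the third frame
    print(layer.playback_position)  # sum of the first two frame durations
    layer.playback_frame = -3  # negative placement: audio starts before frame 0
    print(layer.playback_position)  # -3 * (1.0 / project.fps)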
@@ -8,6 +8,7 @@ signal serialized(dict: Dictionary)
signal about_to_deserialize(dict: Dictionary)
signal resized
signal timeline_updated
signal fps_changed

const INDEXED_MODE := Image.FORMAT_MAX + 1

@@ -65,7 +66,10 @@ var brushes: Array[Image] = []
var reference_images: Array[ReferenceImage] = []
var reference_index: int = -1 # The currently selected index ReferenceImage
var vanishing_points := [] ## Array of Vanishing Points
var fps := 6.0
var fps := 6.0:
set(value):
fps = value
fps_changed.emit()
var user_data := "" ## User defined data, set in the project properties.

var x_symmetry_point: float

@@ -356,6 +360,7 @@ func deserialize(dict: Dictionary, zip_reader: ZIPReader = null, file: FileAcces
tileset.deserialize(saved_tileset)
tilesets.append(tileset)
if dict.has("frames") and dict.has("layers"):
var audio_layers := 0
for saved_layer in dict.layers:
match int(saved_layer.get("type", Global.LayerTypes.PIXEL)):
Global.LayerTypes.PIXEL:

@@ -366,6 +371,18 @@ func deserialize(dict: Dictionary, zip_reader: ZIPReader = null, file: FileAcces
layers.append(Layer3D.new(self))
Global.LayerTypes.TILEMAP:
layers.append(LayerTileMap.new(self, null))
Global.LayerTypes.AUDIO:
var layer := AudioLayer.new(self)
var audio_path := "audio/%s" % audio_layers
if zip_reader.file_exists(audio_path):
var audio_data := zip_reader.read_file(audio_path)
var stream: AudioStream
if saved_layer.get("audio_type", "") == "AudioStreamMP3":
stream = AudioStreamMP3.new()
stream.data = audio_data
layer.audio = stream
layers.append(layer)
audio_layers += 1

var frame_i := 0
for frame in dict.frames:

@@ -390,6 +407,8 @@ func deserialize(dict: Dictionary, zip_reader: ZIPReader = null, file: FileAcces
var tileset := tilesets[tileset_index]
var new_cel := CelTileMap.new(tileset, image)
cels.append(new_cel)
Global.LayerTypes.AUDIO:
cels.append(AudioCel.new())
cel["pxo_version"] = pxo_version
cels[cel_i].deserialize(cel)
_deserialize_metadata(cels[cel_i], cel)

@@ -640,10 +659,10 @@ func find_first_drawable_cel(frame := frames[current_frame]) -> BaseCel:
var result: BaseCel
var cel := frame.cels[0]
var i := 0
while cel is GroupCel and i < layers.size():
while (cel is GroupCel or cel is AudioCel) and i < layers.size():
cel = frame.cels[i]
i += 1
if not cel is GroupCel:
if cel is not GroupCel and cel is not AudioCel:
result = cel
return result

@@ -658,6 +677,18 @@ func get_all_pixel_cels() -> Array[PixelCel]:
return cels


func get_all_audio_layers(only_valid_streams := true) -> Array[AudioLayer]:
var audio_layers: Array[AudioLayer]
for layer in layers:
if layer is AudioLayer:
if only_valid_streams:
if is_instance_valid(layer.audio):
audio_layers.append(layer)
else:
audio_layers.append(layer)
return audio_layers


## Reads data from [param cels] and appends them to [param data],
## to be used for the undo/redo system.
## It adds data such as the images of [PixelCel]s,
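Tying the save and load paths together: save_pxo_file writes each audio layer's stream into the .pxo ZIP under numbered entries ("audio/0", "audio/1", ...), and Project.deserialize reads them back based on the saved "audio_type". A minimal sketch of pulling one of these entries out of a saved project ("project.pxo" is an example path, not part of the codebase):

# Sketch only: recover the first audio layer's MP3 data from a .pxo archive.
func _read_pxo_audio_example() -> void:
    var zip_reader := ZIPReader.new()
    if zip_reader.open("project.pxo") == OK:  # example path
        if zip_reader.file_exists("audio/0"):
            var stream := AudioStreamMP3.new()
            stream.data = zip_reader.read_file("audio/0")
            print(stream.get_length())  # duration of the recovered stream, in seconds
        zip_reader.close()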
@ -181,6 +181,13 @@ var preferences: Array[Preference] = [
|
|||
false,
|
||||
true
|
||||
),
|
||||
Preference.new(
|
||||
"dummy_audio_driver",
|
||||
"Performance/PerformanceContainer/DummyAudioDriver",
|
||||
"button_pressed",
|
||||
false,
|
||||
true
|
||||
),
|
||||
Preference.new("tablet_driver", "Drivers/DriversContainer/TabletDriver", "selected", 0)
|
||||
]
|
||||
|
||||
|
|
|
@ -1142,18 +1142,28 @@ mouse_default_cursor_shape = 2
|
|||
button_pressed = true
|
||||
text = "On"
|
||||
|
||||
[node name="WindowTransparencyLabel" type="Label" parent="HSplitContainer/VBoxContainer/ScrollContainer/RightSide/Performance/PerformanceContainer"]
|
||||
[node name="WindowTransparencyLabel" type="Label" parent="HSplitContainer/VBoxContainer/ScrollContainer/RightSide/Performance/PerformanceContainer" groups=["DesktopOnly"]]
|
||||
layout_mode = 2
|
||||
tooltip_text = "If enabled, the application window can become transparent. This affects performance, so keep it off if you don't need it."
|
||||
mouse_filter = 0
|
||||
text = "Enable window transparency"
|
||||
|
||||
[node name="WindowTransparency" type="CheckBox" parent="HSplitContainer/VBoxContainer/ScrollContainer/RightSide/Performance/PerformanceContainer"]
|
||||
[node name="WindowTransparency" type="CheckBox" parent="HSplitContainer/VBoxContainer/ScrollContainer/RightSide/Performance/PerformanceContainer" groups=["DesktopOnly"]]
|
||||
layout_mode = 2
|
||||
tooltip_text = "If enabled, the application window can become transparent. This affects performance, so keep it off if you don't need it."
|
||||
mouse_default_cursor_shape = 2
|
||||
text = "On"
|
||||
|
||||
[node name="DummyAudioDriverLabel" type="Label" parent="HSplitContainer/VBoxContainer/ScrollContainer/RightSide/Performance/PerformanceContainer" groups=["DesktopOnly"]]
|
||||
layout_mode = 2
|
||||
mouse_filter = 0
|
||||
text = "Use dummy audio driver"
|
||||
|
||||
[node name="DummyAudioDriver" type="CheckBox" parent="HSplitContainer/VBoxContainer/ScrollContainer/RightSide/Performance/PerformanceContainer" groups=["DesktopOnly"]]
|
||||
layout_mode = 2
|
||||
mouse_default_cursor_shape = 2
|
||||
text = "On"
|
||||
|
||||
[node name="Drivers" type="VBoxContainer" parent="HSplitContainer/VBoxContainer/ScrollContainer/RightSide"]
|
||||
visible = false
|
||||
layout_mode = 2
|
||||
|
|
|
@ -1,7 +1,14 @@
|
|||
extends Panel
|
||||
|
||||
## Emitted when the animation starts playing.
|
||||
signal animation_started(forward: bool)
|
||||
## Emitted when the animation reaches the final frame and is not looping,
|
||||
## or if the animation is manually paused.
|
||||
## Note: This signal is not emitted if the animation is looping.
|
||||
signal animation_finished
|
||||
## Emitted when the animation loops, meaning when it reaches the final frame
|
||||
## and the animation keeps playing.
|
||||
signal animation_looped
|
||||
|
||||
enum LoopType { NO, CYCLE, PINGPONG }
|
||||
|
||||
|
@ -41,6 +48,7 @@ var global_layer_expand := true
|
|||
@onready var move_up_layer := %MoveUpLayer as Button
|
||||
@onready var move_down_layer := %MoveDownLayer as Button
|
||||
@onready var merge_down_layer := %MergeDownLayer as Button
|
||||
@onready var layer_fx := %LayerFX as Button
|
||||
@onready var blend_modes_button := %BlendModes as OptionButton
|
||||
@onready var opacity_slider := %OpacitySlider as ValueSlider
|
||||
@onready var frame_scroll_container := %FrameScrollContainer as Control
|
||||
|
@ -687,9 +695,11 @@ func _on_AnimationTimer_timeout() -> void:
|
|||
animation_timer.wait_time = (
|
||||
project.frames[project.current_frame].duration * (1 / fps)
|
||||
)
|
||||
animation_looped.emit()
|
||||
animation_timer.start()
|
||||
LoopType.PINGPONG:
|
||||
animation_forward = false
|
||||
animation_looped.emit()
|
||||
_on_AnimationTimer_timeout()
|
||||
|
||||
else:
|
||||
|
@ -712,9 +722,11 @@ func _on_AnimationTimer_timeout() -> void:
|
|||
animation_timer.wait_time = (
|
||||
project.frames[project.current_frame].duration * (1 / fps)
|
||||
)
|
||||
animation_looped.emit()
|
||||
animation_timer.start()
|
||||
LoopType.PINGPONG:
|
||||
animation_forward = true
|
||||
animation_looped.emit()
|
||||
_on_AnimationTimer_timeout()
|
||||
frame_scroll_container.ensure_control_visible(
|
||||
Global.frame_hbox.get_child(project.current_frame)
|
||||
|
@ -855,6 +867,8 @@ func _on_add_layer_list_id_pressed(id: int) -> void:
|
|||
Global.LayerTypes.THREE_D:
|
||||
layer = Layer3D.new(project)
|
||||
SteamManager.set_achievement("ACH_3D_LAYER")
|
||||
Global.LayerTypes.AUDIO:
|
||||
layer = AudioLayer.new(project)
|
||||
add_layer(layer, project)
|
||||
|
||||
|
||||
|
@ -904,6 +918,8 @@ func _on_CloneLayer_pressed() -> void:
|
|||
cl_layer = LayerTileMap.new(project, src_layer.tileset)
|
||||
else:
|
||||
cl_layer = src_layer.get_script().new(project)
|
||||
if src_layer is AudioLayer:
|
||||
cl_layer.audio = src_layer.audio
|
||||
cl_layer.project = project
|
||||
cl_layer.index = src_layer.index
|
||||
var src_layer_data: Dictionary = src_layer.serialize()
|
||||
|
@ -1191,10 +1207,13 @@ func _toggle_layer_buttons() -> void:
|
|||
(
|
||||
project.current_layer == child_count
|
||||
or layer is GroupLayer
|
||||
or layer is AudioLayer
|
||||
or project.layers[project.current_layer - 1] is GroupLayer
|
||||
or project.layers[project.current_layer - 1] is Layer3D
|
||||
or project.layers[project.current_layer - 1] is AudioLayer
|
||||
)
|
||||
)
|
||||
Global.disable_button(layer_fx, layer is AudioLayer)
|
||||
|
||||
|
||||
func project_changed() -> void:
|
||||
|
|
|
@ -240,7 +240,7 @@ offset_left = -22.0
|
|||
offset_top = -10.0
|
||||
offset_bottom = 10.0
|
||||
mouse_default_cursor_shape = 2
|
||||
item_count = 4
|
||||
item_count = 5
|
||||
popup/item_0/text = "Add Pixel Layer"
|
||||
popup/item_1/text = "Add Group Layer"
|
||||
popup/item_1/id = 1
|
||||
|
@ -248,6 +248,8 @@ popup/item_2/text = "Add 3D Layer"
|
|||
popup/item_2/id = 2
|
||||
popup/item_3/text = "Add Tilemap Layer"
|
||||
popup/item_3/id = 3
|
||||
popup/item_4/text = "Add Audio Layer"
|
||||
popup/item_4/id = 4
|
||||
|
||||
[node name="TextureRect" type="TextureRect" parent="TimelineContainer/TimelineButtons/LayerTools/MarginContainer/LayerSettingsContainer/LayerButtons/AddLayer/AddLayerList"]
|
||||
layout_mode = 0
|
||||
|
@ -380,6 +382,7 @@ texture = ExtResource("5")
|
|||
stretch_mode = 3
|
||||
|
||||
[node name="LayerFX" type="Button" parent="TimelineContainer/TimelineButtons/LayerTools/MarginContainer/LayerSettingsContainer/LayerButtons" groups=["UIButtons"]]
|
||||
unique_name_in_owner = true
|
||||
custom_minimum_size = Vector2(24, 24)
|
||||
layout_mode = 2
|
||||
size_flags_horizontal = 3
|
||||
|
|
|
@ -31,6 +31,13 @@ func _ready() -> void:
|
|||
popup_menu.add_item("Unlink Cels")
|
||||
elif cel is GroupCel:
|
||||
transparent_checker.visible = false
|
||||
elif cel is AudioCel:
|
||||
popup_menu.add_item("Play audio here")
|
||||
_is_playing_audio()
|
||||
Global.cel_switched.connect(_is_playing_audio)
|
||||
Global.current_project.fps_changed.connect(_is_playing_audio)
|
||||
Global.current_project.layers[layer].audio_changed.connect(_is_playing_audio)
|
||||
Global.current_project.layers[layer].playback_frame_changed.connect(_is_playing_audio)
|
||||
|
||||
|
||||
func _notification(what: int) -> void:
|
||||
|
@ -66,6 +73,7 @@ func button_setup() -> void:
|
|||
|
||||
var base_layer := Global.current_project.layers[layer]
|
||||
tooltip_text = tr("Frame: %s, Layer: %s") % [frame + 1, base_layer.name]
|
||||
if cel is not AudioCel:
|
||||
cel_texture.texture = cel.image_texture
|
||||
if is_instance_valid(linked):
|
||||
linked.visible = cel.link_set != null
|
||||
|
@ -129,6 +137,10 @@ func _on_PopupMenu_id_pressed(id: int) -> void:
|
|||
properties.cel_indices = _get_cel_indices()
|
||||
properties.popup_centered()
|
||||
MenuOptions.DELETE:
|
||||
var layer_class := Global.current_project.layers[layer]
|
||||
if layer_class is AudioLayer:
|
||||
layer_class.playback_frame = frame
|
||||
else:
|
||||
_delete_cel_content()
|
||||
|
||||
MenuOptions.LINK, MenuOptions.UNLINK:
|
||||
|
@ -396,3 +408,16 @@ func _sort_cel_indices_by_frame(a: Array, b: Array) -> bool:
|
|||
if frame_a < frame_b:
|
||||
return true
|
||||
return false
|
||||
|
||||
|
||||
func _is_playing_audio() -> void:
|
||||
var frame_class := Global.current_project.frames[frame]
|
||||
var layer_class := Global.current_project.layers[layer] as AudioLayer
|
||||
var audio_length := layer_class.get_audio_length()
|
||||
var frame_pos := frame_class.position_in_seconds(
|
||||
Global.current_project, layer_class.playback_frame
|
||||
)
|
||||
if frame_pos >= 0 and frame_pos < audio_length:
|
||||
cel_texture.texture = preload("res://assets/graphics/misc/musical_note.png")
|
||||
else:
|
||||
cel_texture.texture = null
|
||||
|
|
|
@ -74,7 +74,6 @@ grow_vertical = 2
|
|||
[node name="PopupMenu" type="PopupMenu" parent="."]
|
||||
item_count = 1
|
||||
item_0/text = "Properties"
|
||||
item_0/id = 0
|
||||
|
||||
[connection signal="pressed" from="." to="." method="_on_CelButton_pressed"]
|
||||
[connection signal="id_pressed" from="PopupMenu" to="." method="_on_PopupMenu_id_pressed"]
|
||||
|
|
|
@ -15,18 +15,18 @@ func _on_visibility_changed() -> void:
|
|||
Global.dialog_open(visible)
|
||||
var first_cel := Global.current_project.frames[cel_indices[0][0]].cels[cel_indices[0][1]]
|
||||
if visible:
|
||||
if cel_indices.size() == 1:
|
||||
var layer := Global.current_project.layers[cel_indices[0][1]]
|
||||
frame_num.text = str(cel_indices[0][0] + 1)
|
||||
layer_num.text = layer.name
|
||||
else:
|
||||
var first_layer := Global.current_project.layers[cel_indices[0][1]]
|
||||
if cel_indices.size() == 1:
|
||||
frame_num.text = str(cel_indices[0][0] + 1)
|
||||
layer_num.text = first_layer.name
|
||||
else:
|
||||
var last_layer := Global.current_project.layers[cel_indices[-1][1]]
|
||||
frame_num.text = "[%s...%s]" % [cel_indices[0][0] + 1, cel_indices[-1][0] + 1]
|
||||
layer_num.text = "[%s...%s]" % [first_layer.name, last_layer.name]
|
||||
opacity_slider.value = first_cel.opacity * 100.0
|
||||
z_index_slider.value = first_cel.z_index
|
||||
user_data_text_edit.text = first_cel.user_data
|
||||
get_tree().set_group(&"VisualCels", "visible", first_layer is not AudioLayer)
|
||||
else:
|
||||
cel_indices = []
|
||||
|
||||
|
|
|
@ -33,12 +33,12 @@ layout_mode = 2
|
|||
text = "1"
|
||||
horizontal_alignment = 1
|
||||
|
||||
[node name="OpacityLabel" type="Label" parent="GridContainer"]
|
||||
[node name="OpacityLabel" type="Label" parent="GridContainer" groups=["VisualCels"]]
|
||||
layout_mode = 2
|
||||
size_flags_horizontal = 3
|
||||
text = "Opacity:"
|
||||
|
||||
[node name="OpacitySlider" type="TextureProgressBar" parent="GridContainer"]
|
||||
[node name="OpacitySlider" type="TextureProgressBar" parent="GridContainer" groups=["VisualCels"]]
|
||||
layout_mode = 2
|
||||
size_flags_horizontal = 3
|
||||
focus_mode = 2
|
||||
|
@ -52,12 +52,12 @@ stretch_margin_right = 3
|
|||
stretch_margin_bottom = 3
|
||||
script = ExtResource("1_85pb7")
|
||||
|
||||
[node name="ZIndexLabel" type="Label" parent="GridContainer"]
|
||||
[node name="ZIndexLabel" type="Label" parent="GridContainer" groups=["VisualCels"]]
|
||||
layout_mode = 2
|
||||
size_flags_horizontal = 3
|
||||
text = "Z-Index:"
|
||||
|
||||
[node name="ZIndexSlider" type="TextureProgressBar" parent="GridContainer"]
|
||||
[node name="ZIndexSlider" type="TextureProgressBar" parent="GridContainer" groups=["VisualCels"]]
|
||||
layout_mode = 2
|
||||
size_flags_horizontal = 3
|
||||
focus_mode = 2
|
||||
|
@ -76,11 +76,13 @@ script = ExtResource("1_85pb7")
|
|||
|
||||
[node name="UserDataLabel" type="Label" parent="GridContainer"]
|
||||
layout_mode = 2
|
||||
size_flags_horizontal = 3
|
||||
size_flags_vertical = 0
|
||||
text = "User data:"
|
||||
|
||||
[node name="UserDataTextEdit" type="TextEdit" parent="GridContainer"]
|
||||
layout_mode = 2
|
||||
size_flags_horizontal = 3
|
||||
scroll_fit_content_height = true
|
||||
|
||||
[connection signal="visibility_changed" from="." to="." method="_on_visibility_changed"]
|
||||
|
|
|
@ -19,8 +19,9 @@ func _ready() -> void:
|
|||
|
||||
|
||||
func _update_tooltip() -> void:
|
||||
var duration := Global.current_project.frames[frame].duration
|
||||
var duration_sec := duration * (1.0 / Global.current_project.fps)
|
||||
var frame_class := Global.current_project.frames[frame]
|
||||
var duration := frame_class.duration
|
||||
var duration_sec := frame_class.get_duration_in_seconds(Global.current_project.fps)
|
||||
var duration_str := str(duration_sec)
|
||||
if "." in duration_str: # If its a decimal value
|
||||
duration_str = "%.2f" % duration_sec # Up to 2 decimal places
|
||||
|
|
|
@ -14,7 +14,9 @@ var button_pressed := false:
|
|||
main_button.button_pressed = value
|
||||
get:
|
||||
return main_button.button_pressed
|
||||
var animation_running := false
|
||||
|
||||
var audio_player: AudioStreamPlayer
|
||||
@onready var properties: AcceptDialog = Global.control.find_child("LayerProperties")
|
||||
@onready var main_button := %LayerMainButton as Button
|
||||
@onready var expand_button := %ExpandButton as BaseButton
|
||||
|
@ -31,7 +33,7 @@ var button_pressed := false:
|
|||
func _ready() -> void:
|
||||
main_button.layer_index = layer_index
|
||||
main_button.hierarchy_depth_pixel_shift = HIERARCHY_DEPTH_PIXEL_SHIFT
|
||||
Global.cel_switched.connect(func(): z_index = 1 if button_pressed else 0)
|
||||
Global.cel_switched.connect(_on_cel_switched)
|
||||
var layer := Global.current_project.layers[layer_index]
|
||||
layer.name_changed.connect(func(): label.text = layer.name)
|
||||
layer.visibility_changed.connect(update_buttons)
|
||||
|
@ -39,6 +41,14 @@ func _ready() -> void:
|
|||
linked_button.visible = true
|
||||
elif layer is GroupLayer:
|
||||
expand_button.visible = true
|
||||
elif layer is AudioLayer:
|
||||
audio_player = AudioStreamPlayer.new()
|
||||
audio_player.stream = layer.audio
|
||||
layer.audio_changed.connect(func(): audio_player.stream = layer.audio)
|
||||
add_child(audio_player)
|
||||
Global.animation_timeline.animation_started.connect(_on_animation_started)
|
||||
Global.animation_timeline.animation_looped.connect(_on_animation_looped)
|
||||
Global.animation_timeline.animation_finished.connect(_on_animation_finished)
|
||||
custom_minimum_size.y = Global.animation_timeline.cel_size
|
||||
label.text = layer.name
|
||||
line_edit.text = layer.name
|
||||
|
@ -56,6 +66,64 @@ func _ready() -> void:
|
|||
update_buttons()
|
||||
|
||||
|
||||
func _on_cel_switched() -> void:
|
||||
z_index = 1 if button_pressed else 0
|
||||
var layer := Global.current_project.layers[layer_index]
|
||||
if layer is AudioLayer:
|
||||
if not is_instance_valid(audio_player):
|
||||
return
|
||||
if not layer.is_visible_in_hierarchy():
|
||||
audio_player.stop()
|
||||
return
|
||||
if animation_running:
|
||||
var current_frame := Global.current_project.current_frame
|
||||
if (
|
||||
current_frame == layer.playback_frame
|
||||
or (current_frame == 0 and layer.playback_frame < 0)
|
||||
):
|
||||
_play_audio(false)
|
||||
else:
|
||||
_play_audio(true)
|
||||
|
||||
|
||||
func _on_animation_started(_dir: bool) -> void:
|
||||
animation_running = true
|
||||
_play_audio(false)
|
||||
|
||||
|
||||
func _on_animation_looped() -> void:
|
||||
var layer := Global.current_project.layers[layer_index]
|
||||
if layer is AudioLayer:
|
||||
if layer.playback_frame > 0 or not layer.is_visible_in_hierarchy():
|
||||
if is_instance_valid(audio_player):
|
||||
audio_player.stop()
|
||||
|
||||
|
||||
func _on_animation_finished() -> void:
|
||||
animation_running = false
|
||||
if is_instance_valid(audio_player):
|
||||
audio_player.stop()
|
||||
|
||||
|
||||
func _play_audio(single_frame: bool) -> void:
|
||||
if not is_instance_valid(audio_player):
|
||||
return
|
||||
var project := Global.current_project
|
||||
var layer := project.layers[layer_index] as AudioLayer
|
||||
if not layer.is_visible_in_hierarchy():
|
||||
return
|
||||
var audio_length := layer.get_audio_length()
|
||||
var frame := project.frames[project.current_frame]
|
||||
var frame_pos := frame.position_in_seconds(project, layer.playback_frame)
|
||||
if frame_pos >= 0 and frame_pos < audio_length:
|
||||
audio_player.play(frame_pos)
|
||||
if single_frame:
|
||||
var timer := get_tree().create_timer(frame.get_duration_in_seconds(project.fps))
|
||||
timer.timeout.connect(func(): audio_player.stop())
|
||||
else:
|
||||
audio_player.stop()
|
||||
|
||||
|
||||
func update_buttons() -> void:
|
||||
var layer := Global.current_project.layers[layer_index]
|
||||
if layer is GroupLayer:
|
||||
|
|
|
@ -169,7 +169,6 @@ caret_blink_interval = 0.5
|
|||
disable_3d = true
|
||||
item_count = 2
|
||||
item_0/text = "Properties"
|
||||
item_0/id = 0
|
||||
item_1/text = "Clipping mask"
|
||||
item_1/checkable = 1
|
||||
item_1/id = 1
|
||||
|
|
|
@ -4,11 +4,18 @@ signal layer_property_changed
|
|||
|
||||
var layer_indices: PackedInt32Array
|
||||
|
||||
@onready var grid_container: GridContainer = $GridContainer
|
||||
@onready var name_line_edit := $GridContainer/NameLineEdit as LineEdit
|
||||
@onready var opacity_slider := $GridContainer/OpacitySlider as ValueSlider
|
||||
@onready var blend_modes_button := $GridContainer/BlendModeOptionButton as OptionButton
|
||||
@onready var play_at_frame_slider := $GridContainer/PlayAtFrameSlider as ValueSlider
|
||||
@onready var user_data_text_edit := $GridContainer/UserDataTextEdit as TextEdit
|
||||
@onready var tileset_option_button := $GridContainer/TilesetOptionButton as OptionButton
|
||||
@onready var audio_file_dialog := $AudioFileDialog as FileDialog
|
||||
|
||||
|
||||
func _ready() -> void:
|
||||
audio_file_dialog.use_native_dialog = Global.use_native_file_dialogs
|
||||
|
||||
|
||||
func _on_visibility_changed() -> void:
|
||||
|
@ -23,8 +30,13 @@ func _on_visibility_changed() -> void:
|
|||
opacity_slider.value = first_layer.opacity * 100.0
|
||||
var blend_mode_index := blend_modes_button.get_item_index(first_layer.blend_mode)
|
||||
blend_modes_button.selected = blend_mode_index
|
||||
if first_layer is AudioLayer:
|
||||
play_at_frame_slider.value = first_layer.playback_frame + 1
|
||||
play_at_frame_slider.max_value = project.frames.size()
|
||||
user_data_text_edit.text = first_layer.user_data
|
||||
get_tree().set_group(&"VisualLayers", "visible", first_layer is not AudioLayer)
|
||||
get_tree().set_group(&"TilemapLayers", "visible", first_layer is LayerTileMap)
|
||||
get_tree().set_group(&"AudioLayers", "visible", first_layer is AudioLayer)
|
||||
tileset_option_button.clear()
|
||||
if first_layer is LayerTileMap:
|
||||
for i in project.tilesets.size():
|
||||
|
@ -149,3 +161,28 @@ func _on_tileset_option_button_item_selected(index: int) -> void:
|
|||
project.undo_redo.add_undo_method(Global.canvas.draw_layers)
|
||||
project.undo_redo.add_undo_method(func(): Global.cel_switched.emit())
|
||||
project.undo_redo.commit_action()
|
||||
|
||||
|
||||
func _on_audio_file_button_pressed() -> void:
|
||||
audio_file_dialog.popup_centered()
|
||||
|
||||
|
||||
func _on_play_at_frame_slider_value_changed(value: float) -> void:
|
||||
if layer_indices.size() == 0:
|
||||
return
|
||||
for layer_index in layer_indices:
|
||||
var layer := Global.current_project.layers[layer_index]
|
||||
if layer is AudioLayer:
|
||||
layer.playback_frame = value - 1
|
||||
|
||||
|
||||
func _on_audio_file_dialog_file_selected(path: String) -> void:
|
||||
var audio_stream: AudioStream
|
||||
if path.get_extension() == "mp3":
|
||||
var file := FileAccess.open(path, FileAccess.READ)
|
||||
audio_stream = AudioStreamMP3.new()
|
||||
audio_stream.data = file.get_buffer(file.get_length())
|
||||
for layer_index in layer_indices:
|
||||
var layer := Global.current_project.layers[layer_index]
|
||||
if layer is AudioLayer:
|
||||
layer.audio = audio_stream
|
||||
|
|
|
@ -5,14 +5,15 @@
|
|||
|
||||
[node name="LayerProperties" type="AcceptDialog"]
|
||||
title = "Layer properties"
|
||||
size = Vector2i(300, 208)
|
||||
position = Vector2i(0, 36)
|
||||
size = Vector2i(300, 270)
|
||||
script = ExtResource("1_54q1t")
|
||||
|
||||
[node name="GridContainer" type="GridContainer" parent="."]
|
||||
offset_left = 8.0
|
||||
offset_top = 8.0
|
||||
offset_right = 292.0
|
||||
offset_bottom = 159.0
|
||||
offset_bottom = 221.0
|
||||
columns = 2
|
||||
|
||||
[node name="NameLabel" type="Label" parent="GridContainer"]
|
||||
|
@ -24,12 +25,12 @@ text = "Name:"
|
|||
layout_mode = 2
|
||||
size_flags_horizontal = 3
|
||||
|
||||
[node name="OpacityLabel" type="Label" parent="GridContainer"]
|
||||
[node name="OpacityLabel" type="Label" parent="GridContainer" groups=["VisualLayers"]]
|
||||
layout_mode = 2
|
||||
size_flags_horizontal = 3
|
||||
text = "Opacity:"
|
||||
|
||||
[node name="OpacitySlider" type="TextureProgressBar" parent="GridContainer"]
|
||||
[node name="OpacitySlider" type="TextureProgressBar" parent="GridContainer" groups=["VisualLayers"]]
|
||||
layout_mode = 2
|
||||
size_flags_horizontal = 3
|
||||
focus_mode = 2
|
||||
|
@ -42,16 +43,44 @@ stretch_margin_right = 3
|
|||
stretch_margin_bottom = 3
|
||||
script = ExtResource("2_bwpwc")
|
||||
|
||||
[node name="BlendModeLabel" type="Label" parent="GridContainer"]
|
||||
[node name="BlendModeLabel" type="Label" parent="GridContainer" groups=["VisualLayers"]]
|
||||
layout_mode = 2
|
||||
size_flags_horizontal = 3
|
||||
text = "Blend mode:"
|
||||
|
||||
[node name="BlendModeOptionButton" type="OptionButton" parent="GridContainer"]
|
||||
[node name="BlendModeOptionButton" type="OptionButton" parent="GridContainer" groups=["VisualLayers"]]
|
||||
layout_mode = 2
|
||||
size_flags_horizontal = 3
|
||||
mouse_default_cursor_shape = 2
|
||||
|
||||
[node name="AudioFileLabel" type="Label" parent="GridContainer" groups=["AudioLayers"]]
|
||||
layout_mode = 2
|
||||
text = "Audio file:"
|
||||
|
||||
[node name="AudioFileButton" type="Button" parent="GridContainer" groups=["AudioLayers"]]
|
||||
layout_mode = 2
|
||||
mouse_default_cursor_shape = 2
|
||||
text = "Load file"
|
||||
|
||||
[node name="PlayAtFrameLabel" type="Label" parent="GridContainer" groups=["AudioLayers"]]
|
||||
layout_mode = 2
|
||||
text = "Play at frame:"
|
||||
|
||||
[node name="PlayAtFrameSlider" type="TextureProgressBar" parent="GridContainer" groups=["AudioLayers"]]
|
||||
layout_mode = 2
|
||||
focus_mode = 2
|
||||
mouse_default_cursor_shape = 2
|
||||
theme_type_variation = &"ValueSlider"
|
||||
min_value = 1.0
|
||||
value = 1.0
|
||||
allow_greater = true
|
||||
nine_patch_stretch = true
|
||||
stretch_margin_left = 3
|
||||
stretch_margin_top = 3
|
||||
stretch_margin_right = 3
|
||||
stretch_margin_bottom = 3
|
||||
script = ExtResource("2_bwpwc")
|
||||
|
||||
[node name="UserDataLabel" type="Label" parent="GridContainer"]
|
||||
layout_mode = 2
|
||||
size_flags_horizontal = 3
|
||||
|
@ -73,9 +102,21 @@ text = "Tileset:"
|
|||
layout_mode = 2
|
||||
mouse_default_cursor_shape = 2
|
||||
|
||||
[node name="AudioFileDialog" type="FileDialog" parent="."]
|
||||
title = "Open a File"
|
||||
size = Vector2i(870, 400)
|
||||
always_on_top = true
|
||||
ok_button_text = "Open"
|
||||
file_mode = 0
|
||||
access = 2
|
||||
filters = PackedStringArray("*.mp3 ; MP3 Audio")
|
||||
|
||||
[connection signal="visibility_changed" from="." to="." method="_on_visibility_changed"]
|
||||
[connection signal="text_changed" from="GridContainer/NameLineEdit" to="." method="_on_name_line_edit_text_changed"]
|
||||
[connection signal="value_changed" from="GridContainer/OpacitySlider" to="." method="_on_opacity_slider_value_changed"]
|
||||
[connection signal="item_selected" from="GridContainer/BlendModeOptionButton" to="." method="_on_blend_mode_option_button_item_selected"]
|
||||
[connection signal="pressed" from="GridContainer/AudioFileButton" to="." method="_on_audio_file_button_pressed"]
|
||||
[connection signal="value_changed" from="GridContainer/PlayAtFrameSlider" to="." method="_on_play_at_frame_slider_value_changed"]
|
||||
[connection signal="text_changed" from="GridContainer/UserDataTextEdit" to="." method="_on_user_data_text_edit_text_changed"]
|
||||
[connection signal="item_selected" from="GridContainer/TilesetOptionButton" to="." method="_on_tileset_option_button_item_selected"]
|
||||
[connection signal="file_selected" from="AudioFileDialog" to="." method="_on_audio_file_dialog_file_selected"]
|
||||
|
|