megapixels-1.4.3/.clang-format
# SPDX-License-Identifier: GPL-2.0
#
# clang-format configuration file. Intended for clang-format >= 4.
#
# For more information, see:
#
# Documentation/process/clang-format.rst
# https://clang.llvm.org/docs/ClangFormat.html
# https://clang.llvm.org/docs/ClangFormatStyleOptions.html
#
---
AccessModifierOffset: -4
AlignAfterOpenBracket: Align
AlignConsecutiveAssignments: false
AlignConsecutiveDeclarations: false
AlignEscapedNewlines: Right
AlignOperands: true
AlignTrailingComments: false
AllowAllParametersOfDeclarationOnNextLine: false
AllowShortBlocksOnASingleLine: false
AllowShortCaseLabelsOnASingleLine: false
AllowShortFunctionsOnASingleLine: None
AllowShortIfStatementsOnASingleLine: false
AllowShortLoopsOnASingleLine: false
AlwaysBreakAfterDefinitionReturnType: All
AlwaysBreakAfterReturnType: None
AlwaysBreakBeforeMultilineStrings: false
AlwaysBreakTemplateDeclarations: false
BinPackArguments: false
BinPackParameters: false
BraceWrapping:
  AfterClass: false
  AfterControlStatement: false
  AfterEnum: false
  AfterFunction: true
  AfterNamespace: true
  AfterObjCDeclaration: false
  AfterStruct: false
  AfterUnion: false
  AfterExternBlock: false
  BeforeCatch: false
  BeforeElse: false
  IndentBraces: false
  SplitEmptyFunction: true
  SplitEmptyRecord: true
  SplitEmptyNamespace: true
BreakBeforeBinaryOperators: None
BreakBeforeBraces: Custom
BreakBeforeInheritanceComma: false
BreakBeforeTernaryOperators: false
BreakConstructorInitializersBeforeComma: false
BreakConstructorInitializers: BeforeComma
BreakAfterJavaFieldAnnotations: false
BreakStringLiterals: false
ColumnLimit: 85
CommentPragmas: '^ IWYU pragma:'
CompactNamespaces: false
ConstructorInitializerAllOnOneLineOrOnePerLine: false
ConstructorInitializerIndentWidth: 8
ContinuationIndentWidth: 8
Cpp11BracedListStyle: false
DerivePointerAlignment: false
DisableFormat: false
ExperimentalAutoDetectBinPacking: false
FixNamespaceComments: false
# Taken from:
# git grep -h '^#define [^[:space:]]*for_each[^[:space:]]*(' include/ \
# | sed "s,^#define \([^[:space:]]*for_each[^[:space:]]*\)(.*$, - '\1'," \
# | sort | uniq
ForEachMacros:
- 'apei_estatus_for_each_section'
- 'ata_for_each_dev'
- 'ata_for_each_link'
- '__ata_qc_for_each'
- 'ata_qc_for_each'
- 'ata_qc_for_each_raw'
- 'ata_qc_for_each_with_internal'
- 'ax25_for_each'
- 'ax25_uid_for_each'
- '__bio_for_each_bvec'
- 'bio_for_each_bvec'
- 'bio_for_each_bvec_all'
- 'bio_for_each_integrity_vec'
- '__bio_for_each_segment'
- 'bio_for_each_segment'
- 'bio_for_each_segment_all'
- 'bio_list_for_each'
- 'bip_for_each_vec'
- 'bitmap_for_each_clear_region'
- 'bitmap_for_each_set_region'
- 'blkg_for_each_descendant_post'
- 'blkg_for_each_descendant_pre'
- 'blk_queue_for_each_rl'
- 'bond_for_each_slave'
- 'bond_for_each_slave_rcu'
- 'bpf_for_each_spilled_reg'
- 'btree_for_each_safe128'
- 'btree_for_each_safe32'
- 'btree_for_each_safe64'
- 'btree_for_each_safel'
- 'card_for_each_dev'
- 'cgroup_taskset_for_each'
- 'cgroup_taskset_for_each_leader'
- 'cpufreq_for_each_entry'
- 'cpufreq_for_each_entry_idx'
- 'cpufreq_for_each_valid_entry'
- 'cpufreq_for_each_valid_entry_idx'
- 'css_for_each_child'
- 'css_for_each_descendant_post'
- 'css_for_each_descendant_pre'
- 'device_for_each_child_node'
- 'dma_fence_chain_for_each'
- 'do_for_each_ftrace_op'
- 'drm_atomic_crtc_for_each_plane'
- 'drm_atomic_crtc_state_for_each_plane'
- 'drm_atomic_crtc_state_for_each_plane_state'
- 'drm_atomic_for_each_plane_damage'
- 'drm_client_for_each_connector_iter'
- 'drm_client_for_each_modeset'
- 'drm_connector_for_each_possible_encoder'
- 'drm_for_each_bridge_in_chain'
- 'drm_for_each_connector_iter'
- 'drm_for_each_crtc'
- 'drm_for_each_encoder'
- 'drm_for_each_encoder_mask'
- 'drm_for_each_fb'
- 'drm_for_each_legacy_plane'
- 'drm_for_each_plane'
- 'drm_for_each_plane_mask'
- 'drm_for_each_privobj'
- 'drm_mm_for_each_hole'
- 'drm_mm_for_each_node'
- 'drm_mm_for_each_node_in_range'
- 'drm_mm_for_each_node_safe'
- 'flow_action_for_each'
- 'for_each_active_dev_scope'
- 'for_each_active_drhd_unit'
- 'for_each_active_iommu'
- 'for_each_aggr_pgid'
- 'for_each_available_child_of_node'
- 'for_each_bio'
- 'for_each_board_func_rsrc'
- 'for_each_bvec'
- 'for_each_card_auxs'
- 'for_each_card_auxs_safe'
- 'for_each_card_components'
- 'for_each_card_dapms'
- 'for_each_card_pre_auxs'
- 'for_each_card_prelinks'
- 'for_each_card_rtds'
- 'for_each_card_rtds_safe'
- 'for_each_card_widgets'
- 'for_each_card_widgets_safe'
- 'for_each_cgroup_storage_type'
- 'for_each_child_of_node'
- 'for_each_clear_bit'
- 'for_each_clear_bit_from'
- 'for_each_cmsghdr'
- 'for_each_compatible_node'
- 'for_each_component_dais'
- 'for_each_component_dais_safe'
- 'for_each_comp_order'
- 'for_each_console'
- 'for_each_cpu'
- 'for_each_cpu_and'
- 'for_each_cpu_not'
- 'for_each_cpu_wrap'
- 'for_each_dapm_widgets'
- 'for_each_dev_addr'
- 'for_each_dev_scope'
- 'for_each_displayid_db'
- 'for_each_dma_cap_mask'
- 'for_each_dpcm_be'
- 'for_each_dpcm_be_rollback'
- 'for_each_dpcm_be_safe'
- 'for_each_dpcm_fe'
- 'for_each_drhd_unit'
- 'for_each_dss_dev'
- 'for_each_efi_memory_desc'
- 'for_each_efi_memory_desc_in_map'
- 'for_each_element'
- 'for_each_element_extid'
- 'for_each_element_id'
- 'for_each_endpoint_of_node'
- 'for_each_evictable_lru'
- 'for_each_fib6_node_rt_rcu'
- 'for_each_fib6_walker_rt'
- 'for_each_free_mem_pfn_range_in_zone'
- 'for_each_free_mem_pfn_range_in_zone_from'
- 'for_each_free_mem_range'
- 'for_each_free_mem_range_reverse'
- 'for_each_func_rsrc'
- 'for_each_hstate'
- 'for_each_if'
- 'for_each_iommu'
- 'for_each_ip_tunnel_rcu'
- 'for_each_irq_nr'
- 'for_each_link_codecs'
- 'for_each_link_cpus'
- 'for_each_link_platforms'
- 'for_each_lru'
- 'for_each_matching_node'
- 'for_each_matching_node_and_match'
- 'for_each_member'
- 'for_each_mem_region'
- 'for_each_memblock_type'
- 'for_each_memcg_cache_index'
- 'for_each_mem_pfn_range'
- '__for_each_mem_range'
- 'for_each_mem_range'
- '__for_each_mem_range_rev'
- 'for_each_mem_range_rev'
- 'for_each_migratetype_order'
- 'for_each_msi_entry'
- 'for_each_msi_entry_safe'
- 'for_each_net'
- 'for_each_net_continue_reverse'
- 'for_each_netdev'
- 'for_each_netdev_continue'
- 'for_each_netdev_continue_rcu'
- 'for_each_netdev_continue_reverse'
- 'for_each_netdev_feature'
- 'for_each_netdev_in_bond_rcu'
- 'for_each_netdev_rcu'
- 'for_each_netdev_reverse'
- 'for_each_netdev_safe'
- 'for_each_net_rcu'
- 'for_each_new_connector_in_state'
- 'for_each_new_crtc_in_state'
- 'for_each_new_mst_mgr_in_state'
- 'for_each_new_plane_in_state'
- 'for_each_new_private_obj_in_state'
- 'for_each_node'
- 'for_each_node_by_name'
- 'for_each_node_by_type'
- 'for_each_node_mask'
- 'for_each_node_state'
- 'for_each_node_with_cpus'
- 'for_each_node_with_property'
- 'for_each_nonreserved_multicast_dest_pgid'
- 'for_each_of_allnodes'
- 'for_each_of_allnodes_from'
- 'for_each_of_cpu_node'
- 'for_each_of_pci_range'
- 'for_each_old_connector_in_state'
- 'for_each_old_crtc_in_state'
- 'for_each_old_mst_mgr_in_state'
- 'for_each_oldnew_connector_in_state'
- 'for_each_oldnew_crtc_in_state'
- 'for_each_oldnew_mst_mgr_in_state'
- 'for_each_oldnew_plane_in_state'
- 'for_each_oldnew_plane_in_state_reverse'
- 'for_each_oldnew_private_obj_in_state'
- 'for_each_old_plane_in_state'
- 'for_each_old_private_obj_in_state'
- 'for_each_online_cpu'
- 'for_each_online_node'
- 'for_each_online_pgdat'
- 'for_each_pci_bridge'
- 'for_each_pci_dev'
- 'for_each_pci_msi_entry'
- 'for_each_pcm_streams'
- 'for_each_physmem_range'
- 'for_each_populated_zone'
- 'for_each_possible_cpu'
- 'for_each_present_cpu'
- 'for_each_prime_number'
- 'for_each_prime_number_from'
- 'for_each_process'
- 'for_each_process_thread'
- 'for_each_property_of_node'
- 'for_each_registered_fb'
- 'for_each_requested_gpio'
- 'for_each_requested_gpio_in_range'
- 'for_each_reserved_mem_range'
- 'for_each_reserved_mem_region'
- 'for_each_rtd_codec_dais'
- 'for_each_rtd_codec_dais_rollback'
- 'for_each_rtd_components'
- 'for_each_rtd_cpu_dais'
- 'for_each_rtd_cpu_dais_rollback'
- 'for_each_rtd_dais'
- 'for_each_set_bit'
- 'for_each_set_bit_from'
- 'for_each_set_clump8'
- 'for_each_sg'
- 'for_each_sg_dma_page'
- 'for_each_sg_page'
- 'for_each_sgtable_dma_page'
- 'for_each_sgtable_dma_sg'
- 'for_each_sgtable_page'
- 'for_each_sgtable_sg'
- 'for_each_sibling_event'
- 'for_each_subelement'
- 'for_each_subelement_extid'
- 'for_each_subelement_id'
- '__for_each_thread'
- 'for_each_thread'
- 'for_each_unicast_dest_pgid'
- 'for_each_wakeup_source'
- 'for_each_zone'
- 'for_each_zone_zonelist'
- 'for_each_zone_zonelist_nodemask'
- 'fwnode_for_each_available_child_node'
- 'fwnode_for_each_child_node'
- 'fwnode_graph_for_each_endpoint'
- 'gadget_for_each_ep'
- 'genradix_for_each'
- 'genradix_for_each_from'
- 'hash_for_each'
- 'hash_for_each_possible'
- 'hash_for_each_possible_rcu'
- 'hash_for_each_possible_rcu_notrace'
- 'hash_for_each_possible_safe'
- 'hash_for_each_rcu'
- 'hash_for_each_safe'
- 'hctx_for_each_ctx'
- 'hlist_bl_for_each_entry'
- 'hlist_bl_for_each_entry_rcu'
- 'hlist_bl_for_each_entry_safe'
- 'hlist_for_each'
- 'hlist_for_each_entry'
- 'hlist_for_each_entry_continue'
- 'hlist_for_each_entry_continue_rcu'
- 'hlist_for_each_entry_continue_rcu_bh'
- 'hlist_for_each_entry_from'
- 'hlist_for_each_entry_from_rcu'
- 'hlist_for_each_entry_rcu'
- 'hlist_for_each_entry_rcu_bh'
- 'hlist_for_each_entry_rcu_notrace'
- 'hlist_for_each_entry_safe'
- '__hlist_for_each_rcu'
- 'hlist_for_each_safe'
- 'hlist_nulls_for_each_entry'
- 'hlist_nulls_for_each_entry_from'
- 'hlist_nulls_for_each_entry_rcu'
- 'hlist_nulls_for_each_entry_safe'
- 'i3c_bus_for_each_i2cdev'
- 'i3c_bus_for_each_i3cdev'
- 'ide_host_for_each_port'
- 'ide_port_for_each_dev'
- 'ide_port_for_each_present_dev'
- 'idr_for_each_entry'
- 'idr_for_each_entry_continue'
- 'idr_for_each_entry_continue_ul'
- 'idr_for_each_entry_ul'
- 'in_dev_for_each_ifa_rcu'
- 'in_dev_for_each_ifa_rtnl'
- 'inet_bind_bucket_for_each'
- 'inet_lhash2_for_each_icsk_rcu'
- 'key_for_each'
- 'key_for_each_safe'
- 'klp_for_each_func'
- 'klp_for_each_func_safe'
- 'klp_for_each_func_static'
- 'klp_for_each_object'
- 'klp_for_each_object_safe'
- 'klp_for_each_object_static'
- 'kunit_suite_for_each_test_case'
- 'kvm_for_each_memslot'
- 'kvm_for_each_vcpu'
- 'list_for_each'
- 'list_for_each_codec'
- 'list_for_each_codec_safe'
- 'list_for_each_continue'
- 'list_for_each_entry'
- 'list_for_each_entry_continue'
- 'list_for_each_entry_continue_rcu'
- 'list_for_each_entry_continue_reverse'
- 'list_for_each_entry_from'
- 'list_for_each_entry_from_rcu'
- 'list_for_each_entry_from_reverse'
- 'list_for_each_entry_lockless'
- 'list_for_each_entry_rcu'
- 'list_for_each_entry_reverse'
- 'list_for_each_entry_safe'
- 'list_for_each_entry_safe_continue'
- 'list_for_each_entry_safe_from'
- 'list_for_each_entry_safe_reverse'
- 'list_for_each_prev'
- 'list_for_each_prev_safe'
- 'list_for_each_safe'
- 'llist_for_each'
- 'llist_for_each_entry'
- 'llist_for_each_entry_safe'
- 'llist_for_each_safe'
- 'mci_for_each_dimm'
- 'media_device_for_each_entity'
- 'media_device_for_each_intf'
- 'media_device_for_each_link'
- 'media_device_for_each_pad'
- 'nanddev_io_for_each_page'
- 'netdev_for_each_lower_dev'
- 'netdev_for_each_lower_private'
- 'netdev_for_each_lower_private_rcu'
- 'netdev_for_each_mc_addr'
- 'netdev_for_each_uc_addr'
- 'netdev_for_each_upper_dev_rcu'
- 'netdev_hw_addr_list_for_each'
- 'nft_rule_for_each_expr'
- 'nla_for_each_attr'
- 'nla_for_each_nested'
- 'nlmsg_for_each_attr'
- 'nlmsg_for_each_msg'
- 'nr_neigh_for_each'
- 'nr_neigh_for_each_safe'
- 'nr_node_for_each'
- 'nr_node_for_each_safe'
- 'of_for_each_phandle'
- 'of_property_for_each_string'
- 'of_property_for_each_u32'
- 'pci_bus_for_each_resource'
- 'pcm_for_each_format'
- 'ping_portaddr_for_each_entry'
- 'plist_for_each'
- 'plist_for_each_continue'
- 'plist_for_each_entry'
- 'plist_for_each_entry_continue'
- 'plist_for_each_entry_safe'
- 'plist_for_each_safe'
- 'pnp_for_each_card'
- 'pnp_for_each_dev'
- 'protocol_for_each_card'
- 'protocol_for_each_dev'
- 'queue_for_each_hw_ctx'
- 'radix_tree_for_each_slot'
- 'radix_tree_for_each_tagged'
- 'rbtree_postorder_for_each_entry_safe'
- 'rdma_for_each_block'
- 'rdma_for_each_port'
- 'rdma_umem_for_each_dma_block'
- 'resource_list_for_each_entry'
- 'resource_list_for_each_entry_safe'
- 'rhl_for_each_entry_rcu'
- 'rhl_for_each_rcu'
- 'rht_for_each'
- 'rht_for_each_entry'
- 'rht_for_each_entry_from'
- 'rht_for_each_entry_rcu'
- 'rht_for_each_entry_rcu_from'
- 'rht_for_each_entry_safe'
- 'rht_for_each_from'
- 'rht_for_each_rcu'
- 'rht_for_each_rcu_from'
- '__rq_for_each_bio'
- 'rq_for_each_bvec'
- 'rq_for_each_segment'
- 'scsi_for_each_prot_sg'
- 'scsi_for_each_sg'
- 'sctp_for_each_hentry'
- 'sctp_skb_for_each'
- 'shdma_for_each_chan'
- '__shost_for_each_device'
- 'shost_for_each_device'
- 'sk_for_each'
- 'sk_for_each_bound'
- 'sk_for_each_entry_offset_rcu'
- 'sk_for_each_from'
- 'sk_for_each_rcu'
- 'sk_for_each_safe'
- 'sk_nulls_for_each'
- 'sk_nulls_for_each_from'
- 'sk_nulls_for_each_rcu'
- 'snd_array_for_each'
- 'snd_pcm_group_for_each_entry'
- 'snd_soc_dapm_widget_for_each_path'
- 'snd_soc_dapm_widget_for_each_path_safe'
- 'snd_soc_dapm_widget_for_each_sink_path'
- 'snd_soc_dapm_widget_for_each_source_path'
- 'tb_property_for_each'
- 'tcf_exts_for_each_action'
- 'udp_portaddr_for_each_entry'
- 'udp_portaddr_for_each_entry_rcu'
- 'usb_hub_for_each_child'
- 'v4l2_device_for_each_subdev'
- 'v4l2_m2m_for_each_dst_buf'
- 'v4l2_m2m_for_each_dst_buf_safe'
- 'v4l2_m2m_for_each_src_buf'
- 'v4l2_m2m_for_each_src_buf_safe'
- 'virtio_device_for_each_vq'
- 'while_for_each_ftrace_op'
- 'xa_for_each'
- 'xa_for_each_marked'
- 'xa_for_each_range'
- 'xa_for_each_start'
- 'xas_for_each'
- 'xas_for_each_conflict'
- 'xas_for_each_marked'
- 'xbc_array_for_each_value'
- 'xbc_for_each_key_value'
- 'xbc_node_for_each_array_value'
- 'xbc_node_for_each_child'
- 'xbc_node_for_each_key_value'
- 'zorro_for_each_dev'
IncludeBlocks: Preserve
IncludeCategories:
  - Regex: '.*'
    Priority: 1
IncludeIsMainRegex: '(Test)?$'
IndentCaseLabels: false
IndentPPDirectives: None
IndentWidth: 8
IndentWrappedFunctionNames: false
JavaScriptQuotes: Leave
JavaScriptWrapImports: true
KeepEmptyLinesAtTheStartOfBlocks: false
MacroBlockBegin: ''
MacroBlockEnd: ''
MaxEmptyLinesToKeep: 1
NamespaceIndentation: None
ObjCBinPackProtocolList: Auto
ObjCBlockIndentWidth: 8
ObjCSpaceAfterProperty: true
ObjCSpaceBeforeProtocolList: true
# Taken from git's rules
#PenaltyBreakAssignment: 10
#PenaltyBreakBeforeFirstCallParameter: 30
#PenaltyBreakComment: 10
#PenaltyBreakFirstLessLess: 0
#PenaltyBreakString: 10
#PenaltyExcessCharacter: 100
#PenaltyReturnTypeOnItsOwnLine: 60
PointerAlignment: Right
ReflowComments: true
SortIncludes: true
SortUsingDeclarations: false
SpaceAfterCStyleCast: false
SpaceAfterTemplateKeyword: true
SpaceBeforeAssignmentOperators: true
SpaceBeforeCtorInitializerColon: true
SpaceBeforeInheritanceColon: true
SpaceBeforeParens: ControlStatements
SpaceBeforeRangeBasedForLoopColon: true
SpaceInEmptyParentheses: false
SpacesBeforeTrailingComments: 1
SpacesInAngles: false
SpacesInContainerLiterals: false
SpacesInCStyleCastParentheses: false
SpacesInParentheses: false
SpacesInSquareBrackets: false
Standard: Cpp03
TabWidth: 8
UseTab: Never
...

megapixels-1.4.3/.editorconfig
root = true
[*.{c,h}]
end_of_line = lf
insert_final_newline = true
charset = utf-8
trim_trailing_whitespace = true
indent_style = tab
indent_size = 8
max_line_length = 80

megapixels-1.4.3/.gitignore
# Created by .ignore support plugin (hsz.mobi)
### C template
# Prerequisites
*.d
# Object files
*.o
*.ko
*.obj
*.elf
# Linker output
*.ilk
*.map
*.exp
# Precompiled Headers
*.gch
*.pch
# Libraries
*.lib
*.a
*.la
*.lo
# Shared objects (inc. Windows DLLs)
*.dll
*.so
*.so.*
*.dylib
# Executables
*.exe
*.out
*.app
*.i*86
*.x86_64
*.hex
# Debug files
*.dSYM/
*.su
*.idb
*.pdb
# Kernel Module Compile Results
*.mod*
*.cmd
.tmp_versions/
modules.order
Module.symvers
Mkfile.old
dkms.conf
### CMake template
CMakeLists.txt.user
CMakeCache.txt
CMakeFiles
CMakeScripts
Testing
Makefile
cmake_install.cmake
install_manifest.txt
compile_commands.json
CTestTestfile.cmake
_deps
/.idea
/cmake-build-debug
/cmake-build-release
*~
/.ninja_deps
/.ninja_log
/build.ninja
/meson-info
/meson-private
/builddir
/build

megapixels-1.4.3/.gitlab-ci.yml
build:debian:
  image: debian:bookworm-slim
  before_script:
    - apt update && apt -y install gcc meson ninja-build clang-format-12 libgtk-4-dev libtiff-dev libzbar-dev
  script:
    - meson build
    - ninja -C build
    - ninja -C build test
    - ninja -C build clang-format-check

build:alpine:
  image: alpine:edge
  before_script:
    - apk add --no-cache build-base meson samurai gtk4.0-dev tiff-dev zbar-dev
  script:
    - meson build
    - ninja -C build
    - ninja -C build test

megapixels-1.4.3/CMakeLists.txt
cmake_minimum_required(VERSION 3.14)
project(Megapixels C)
set(CMAKE_C_STANDARD 11)
# Use the package PkgConfig to detect GTK+ headers/library files
FIND_PACKAGE(PkgConfig REQUIRED)
PKG_CHECK_MODULES(GTK3 REQUIRED gtk+-3.0)
# Setup CMake to use GTK+, tell the compiler where to look for headers
# and to the linker where to look for libraries
INCLUDE_DIRECTORIES(${GTK3_INCLUDE_DIRS})
LINK_DIRECTORIES(${GTK3_LIBRARY_DIRS})
# Add other flags to the compiler
ADD_DEFINITIONS(${GTK3_CFLAGS_OTHER})
add_executable(megapixels main.c ini.c ini.h bayer.c bayer.h)
target_link_libraries(megapixels ${GTK3_LIBRARIES})

megapixels-1.4.3/LICENSE
GNU GENERAL PUBLIC LICENSE
Version 3, 29 June 2007
Copyright (C) 2007 Free Software Foundation, Inc. <https://fsf.org/>
Everyone is permitted to copy and distribute verbatim copies
of this license document, but changing it is not allowed.
Preamble
The GNU General Public License is a free, copyleft license for
software and other kinds of works.
The licenses for most software and other practical works are designed
to take away your freedom to share and change the works. By contrast,
the GNU General Public License is intended to guarantee your freedom to
share and change all versions of a program--to make sure it remains free
software for all its users. We, the Free Software Foundation, use the
GNU General Public License for most of our software; it applies also to
any other work released this way by its authors. You can apply it to
your programs, too.
When we speak of free software, we are referring to freedom, not
price. Our General Public Licenses are designed to make sure that you
have the freedom to distribute copies of free software (and charge for
them if you wish), that you receive source code or can get it if you
want it, that you can change the software or use pieces of it in new
free programs, and that you know you can do these things.
To protect your rights, we need to prevent others from denying you
these rights or asking you to surrender the rights. Therefore, you have
certain responsibilities if you distribute copies of the software, or if
you modify it: responsibilities to respect the freedom of others.
For example, if you distribute copies of such a program, whether
gratis or for a fee, you must pass on to the recipients the same
freedoms that you received. You must make sure that they, too, receive
or can get the source code. And you must show them these terms so they
know their rights.
Developers that use the GNU GPL protect your rights with two steps:
(1) assert copyright on the software, and (2) offer you this License
giving you legal permission to copy, distribute and/or modify it.
For the developers' and authors' protection, the GPL clearly explains
that there is no warranty for this free software. For both users' and
authors' sake, the GPL requires that modified versions be marked as
changed, so that their problems will not be attributed erroneously to
authors of previous versions.
Some devices are designed to deny users access to install or run
modified versions of the software inside them, although the manufacturer
can do so. This is fundamentally incompatible with the aim of
protecting users' freedom to change the software. The systematic
pattern of such abuse occurs in the area of products for individuals to
use, which is precisely where it is most unacceptable. Therefore, we
have designed this version of the GPL to prohibit the practice for those
products. If such problems arise substantially in other domains, we
stand ready to extend this provision to those domains in future versions
of the GPL, as needed to protect the freedom of users.
Finally, every program is threatened constantly by software patents.
States should not allow patents to restrict development and use of
software on general-purpose computers, but in those that do, we wish to
avoid the special danger that patents applied to a free program could
make it effectively proprietary. To prevent this, the GPL assures that
patents cannot be used to render the program non-free.
The precise terms and conditions for copying, distribution and
modification follow.
TERMS AND CONDITIONS
0. Definitions.
"This License" refers to version 3 of the GNU General Public License.
"Copyright" also means copyright-like laws that apply to other kinds of
works, such as semiconductor masks.
"The Program" refers to any copyrightable work licensed under this
License. Each licensee is addressed as "you". "Licensees" and
"recipients" may be individuals or organizations.
To "modify" a work means to copy from or adapt all or part of the work
in a fashion requiring copyright permission, other than the making of an
exact copy. The resulting work is called a "modified version" of the
earlier work or a work "based on" the earlier work.
A "covered work" means either the unmodified Program or a work based
on the Program.
To "propagate" a work means to do anything with it that, without
permission, would make you directly or secondarily liable for
infringement under applicable copyright law, except executing it on a
computer or modifying a private copy. Propagation includes copying,
distribution (with or without modification), making available to the
public, and in some countries other activities as well.
To "convey" a work means any kind of propagation that enables other
parties to make or receive copies. Mere interaction with a user through
a computer network, with no transfer of a copy, is not conveying.
An interactive user interface displays "Appropriate Legal Notices"
to the extent that it includes a convenient and prominently visible
feature that (1) displays an appropriate copyright notice, and (2)
tells the user that there is no warranty for the work (except to the
extent that warranties are provided), that licensees may convey the
work under this License, and how to view a copy of this License. If
the interface presents a list of user commands or options, such as a
menu, a prominent item in the list meets this criterion.
1. Source Code.
The "source code" for a work means the preferred form of the work
for making modifications to it. "Object code" means any non-source
form of a work.
A "Standard Interface" means an interface that either is an official
standard defined by a recognized standards body, or, in the case of
interfaces specified for a particular programming language, one that
is widely used among developers working in that language.
The "System Libraries" of an executable work include anything, other
than the work as a whole, that (a) is included in the normal form of
packaging a Major Component, but which is not part of that Major
Component, and (b) serves only to enable use of the work with that
Major Component, or to implement a Standard Interface for which an
implementation is available to the public in source code form. A
"Major Component", in this context, means a major essential component
(kernel, window system, and so on) of the specific operating system
(if any) on which the executable work runs, or a compiler used to
produce the work, or an object code interpreter used to run it.
The "Corresponding Source" for a work in object code form means all
the source code needed to generate, install, and (for an executable
work) run the object code and to modify the work, including scripts to
control those activities. However, it does not include the work's
System Libraries, or general-purpose tools or generally available free
programs which are used unmodified in performing those activities but
which are not part of the work. For example, Corresponding Source
includes interface definition files associated with source files for
the work, and the source code for shared libraries and dynamically
linked subprograms that the work is specifically designed to require,
such as by intimate data communication or control flow between those
subprograms and other parts of the work.
The Corresponding Source need not include anything that users
can regenerate automatically from other parts of the Corresponding
Source.
The Corresponding Source for a work in source code form is that
same work.
2. Basic Permissions.
All rights granted under this License are granted for the term of
copyright on the Program, and are irrevocable provided the stated
conditions are met. This License explicitly affirms your unlimited
permission to run the unmodified Program. The output from running a
covered work is covered by this License only if the output, given its
content, constitutes a covered work. This License acknowledges your
rights of fair use or other equivalent, as provided by copyright law.
You may make, run and propagate covered works that you do not
convey, without conditions so long as your license otherwise remains
in force. You may convey covered works to others for the sole purpose
of having them make modifications exclusively for you, or provide you
with facilities for running those works, provided that you comply with
the terms of this License in conveying all material for which you do
not control copyright. Those thus making or running the covered works
for you must do so exclusively on your behalf, under your direction
and control, on terms that prohibit them from making any copies of
your copyrighted material outside their relationship with you.
Conveying under any other circumstances is permitted solely under
the conditions stated below. Sublicensing is not allowed; section 10
makes it unnecessary.
3. Protecting Users' Legal Rights From Anti-Circumvention Law.
No covered work shall be deemed part of an effective technological
measure under any applicable law fulfilling obligations under article
11 of the WIPO copyright treaty adopted on 20 December 1996, or
similar laws prohibiting or restricting circumvention of such
measures.
When you convey a covered work, you waive any legal power to forbid
circumvention of technological measures to the extent such circumvention
is effected by exercising rights under this License with respect to
the covered work, and you disclaim any intention to limit operation or
modification of the work as a means of enforcing, against the work's
users, your or third parties' legal rights to forbid circumvention of
technological measures.
4. Conveying Verbatim Copies.
You may convey verbatim copies of the Program's source code as you
receive it, in any medium, provided that you conspicuously and
appropriately publish on each copy an appropriate copyright notice;
keep intact all notices stating that this License and any
non-permissive terms added in accord with section 7 apply to the code;
keep intact all notices of the absence of any warranty; and give all
recipients a copy of this License along with the Program.
You may charge any price or no price for each copy that you convey,
and you may offer support or warranty protection for a fee.
5. Conveying Modified Source Versions.
You may convey a work based on the Program, or the modifications to
produce it from the Program, in the form of source code under the
terms of section 4, provided that you also meet all of these conditions:
a) The work must carry prominent notices stating that you modified
it, and giving a relevant date.
b) The work must carry prominent notices stating that it is
released under this License and any conditions added under section
7. This requirement modifies the requirement in section 4 to
"keep intact all notices".
c) You must license the entire work, as a whole, under this
License to anyone who comes into possession of a copy. This
License will therefore apply, along with any applicable section 7
additional terms, to the whole of the work, and all its parts,
regardless of how they are packaged. This License gives no
permission to license the work in any other way, but it does not
invalidate such permission if you have separately received it.
d) If the work has interactive user interfaces, each must display
Appropriate Legal Notices; however, if the Program has interactive
interfaces that do not display Appropriate Legal Notices, your
work need not make them do so.
A compilation of a covered work with other separate and independent
works, which are not by their nature extensions of the covered work,
and which are not combined with it such as to form a larger program,
in or on a volume of a storage or distribution medium, is called an
"aggregate" if the compilation and its resulting copyright are not
used to limit the access or legal rights of the compilation's users
beyond what the individual works permit. Inclusion of a covered work
in an aggregate does not cause this License to apply to the other
parts of the aggregate.
6. Conveying Non-Source Forms.
You may convey a covered work in object code form under the terms
of sections 4 and 5, provided that you also convey the
machine-readable Corresponding Source under the terms of this License,
in one of these ways:
a) Convey the object code in, or embodied in, a physical product
(including a physical distribution medium), accompanied by the
Corresponding Source fixed on a durable physical medium
customarily used for software interchange.
b) Convey the object code in, or embodied in, a physical product
(including a physical distribution medium), accompanied by a
written offer, valid for at least three years and valid for as
long as you offer spare parts or customer support for that product
model, to give anyone who possesses the object code either (1) a
copy of the Corresponding Source for all the software in the
product that is covered by this License, on a durable physical
medium customarily used for software interchange, for a price no
more than your reasonable cost of physically performing this
conveying of source, or (2) access to copy the
Corresponding Source from a network server at no charge.
c) Convey individual copies of the object code with a copy of the
written offer to provide the Corresponding Source. This
alternative is allowed only occasionally and noncommercially, and
only if you received the object code with such an offer, in accord
with subsection 6b.
d) Convey the object code by offering access from a designated
place (gratis or for a charge), and offer equivalent access to the
Corresponding Source in the same way through the same place at no
further charge. You need not require recipients to copy the
Corresponding Source along with the object code. If the place to
copy the object code is a network server, the Corresponding Source
may be on a different server (operated by you or a third party)
that supports equivalent copying facilities, provided you maintain
clear directions next to the object code saying where to find the
Corresponding Source. Regardless of what server hosts the
Corresponding Source, you remain obligated to ensure that it is
available for as long as needed to satisfy these requirements.
e) Convey the object code using peer-to-peer transmission, provided
you inform other peers where the object code and Corresponding
Source of the work are being offered to the general public at no
charge under subsection 6d.
A separable portion of the object code, whose source code is excluded
from the Corresponding Source as a System Library, need not be
included in conveying the object code work.
A "User Product" is either (1) a "consumer product", which means any
tangible personal property which is normally used for personal, family,
or household purposes, or (2) anything designed or sold for incorporation
into a dwelling. In determining whether a product is a consumer product,
doubtful cases shall be resolved in favor of coverage. For a particular
product received by a particular user, "normally used" refers to a
typical or common use of that class of product, regardless of the status
of the particular user or of the way in which the particular user
actually uses, or expects or is expected to use, the product. A product
is a consumer product regardless of whether the product has substantial
commercial, industrial or non-consumer uses, unless such uses represent
the only significant mode of use of the product.
"Installation Information" for a User Product means any methods,
procedures, authorization keys, or other information required to install
and execute modified versions of a covered work in that User Product from
a modified version of its Corresponding Source. The information must
suffice to ensure that the continued functioning of the modified object
code is in no case prevented or interfered with solely because
modification has been made.
If you convey an object code work under this section in, or with, or
specifically for use in, a User Product, and the conveying occurs as
part of a transaction in which the right of possession and use of the
User Product is transferred to the recipient in perpetuity or for a
fixed term (regardless of how the transaction is characterized), the
Corresponding Source conveyed under this section must be accompanied
by the Installation Information. But this requirement does not apply
if neither you nor any third party retains the ability to install
modified object code on the User Product (for example, the work has
been installed in ROM).
The requirement to provide Installation Information does not include a
requirement to continue to provide support service, warranty, or updates
for a work that has been modified or installed by the recipient, or for
the User Product in which it has been modified or installed. Access to a
network may be denied when the modification itself materially and
adversely affects the operation of the network or violates the rules and
protocols for communication across the network.
Corresponding Source conveyed, and Installation Information provided,
in accord with this section must be in a format that is publicly
documented (and with an implementation available to the public in
source code form), and must require no special password or key for
unpacking, reading or copying.
7. Additional Terms.
"Additional permissions" are terms that supplement the terms of this
License by making exceptions from one or more of its conditions.
Additional permissions that are applicable to the entire Program shall
be treated as though they were included in this License, to the extent
that they are valid under applicable law. If additional permissions
apply only to part of the Program, that part may be used separately
under those permissions, but the entire Program remains governed by
this License without regard to the additional permissions.
When you convey a copy of a covered work, you may at your option
remove any additional permissions from that copy, or from any part of
it. (Additional permissions may be written to require their own
removal in certain cases when you modify the work.) You may place
additional permissions on material, added by you to a covered work,
for which you have or can give appropriate copyright permission.
Notwithstanding any other provision of this License, for material you
add to a covered work, you may (if authorized by the copyright holders of
that material) supplement the terms of this License with terms:
a) Disclaiming warranty or limiting liability differently from the
terms of sections 15 and 16 of this License; or
b) Requiring preservation of specified reasonable legal notices or
author attributions in that material or in the Appropriate Legal
Notices displayed by works containing it; or
c) Prohibiting misrepresentation of the origin of that material, or
requiring that modified versions of such material be marked in
reasonable ways as different from the original version; or
d) Limiting the use for publicity purposes of names of licensors or
authors of the material; or
e) Declining to grant rights under trademark law for use of some
trade names, trademarks, or service marks; or
f) Requiring indemnification of licensors and authors of that
material by anyone who conveys the material (or modified versions of
it) with contractual assumptions of liability to the recipient, for
any liability that these contractual assumptions directly impose on
those licensors and authors.
All other non-permissive additional terms are considered "further
restrictions" within the meaning of section 10. If the Program as you
received it, or any part of it, contains a notice stating that it is
governed by this License along with a term that is a further
restriction, you may remove that term. If a license document contains
a further restriction but permits relicensing or conveying under this
License, you may add to a covered work material governed by the terms
of that license document, provided that the further restriction does
not survive such relicensing or conveying.
If you add terms to a covered work in accord with this section, you
must place, in the relevant source files, a statement of the
additional terms that apply to those files, or a notice indicating
where to find the applicable terms.
Additional terms, permissive or non-permissive, may be stated in the
form of a separately written license, or stated as exceptions;
the above requirements apply either way.
8. Termination.
You may not propagate or modify a covered work except as expressly
provided under this License. Any attempt otherwise to propagate or
modify it is void, and will automatically terminate your rights under
this License (including any patent licenses granted under the third
paragraph of section 11).
However, if you cease all violation of this License, then your
license from a particular copyright holder is reinstated (a)
provisionally, unless and until the copyright holder explicitly and
finally terminates your license, and (b) permanently, if the copyright
holder fails to notify you of the violation by some reasonable means
prior to 60 days after the cessation.
Moreover, your license from a particular copyright holder is
reinstated permanently if the copyright holder notifies you of the
violation by some reasonable means, this is the first time you have
received notice of violation of this License (for any work) from that
copyright holder, and you cure the violation prior to 30 days after
your receipt of the notice.
Termination of your rights under this section does not terminate the
licenses of parties who have received copies or rights from you under
this License. If your rights have been terminated and not permanently
reinstated, you do not qualify to receive new licenses for the same
material under section 10.
9. Acceptance Not Required for Having Copies.
You are not required to accept this License in order to receive or
run a copy of the Program. Ancillary propagation of a covered work
occurring solely as a consequence of using peer-to-peer transmission
to receive a copy likewise does not require acceptance. However,
nothing other than this License grants you permission to propagate or
modify any covered work. These actions infringe copyright if you do
not accept this License. Therefore, by modifying or propagating a
covered work, you indicate your acceptance of this License to do so.
10. Automatic Licensing of Downstream Recipients.
Each time you convey a covered work, the recipient automatically
receives a license from the original licensors, to run, modify and
propagate that work, subject to this License. You are not responsible
for enforcing compliance by third parties with this License.
An "entity transaction" is a transaction transferring control of an
organization, or substantially all assets of one, or subdividing an
organization, or merging organizations. If propagation of a covered
work results from an entity transaction, each party to that
transaction who receives a copy of the work also receives whatever
licenses to the work the party's predecessor in interest had or could
give under the previous paragraph, plus a right to possession of the
Corresponding Source of the work from the predecessor in interest, if
the predecessor has it or can get it with reasonable efforts.
You may not impose any further restrictions on the exercise of the
rights granted or affirmed under this License. For example, you may
not impose a license fee, royalty, or other charge for exercise of
rights granted under this License, and you may not initiate litigation
(including a cross-claim or counterclaim in a lawsuit) alleging that
any patent claim is infringed by making, using, selling, offering for
sale, or importing the Program or any portion of it.
11. Patents.
A "contributor" is a copyright holder who authorizes use under this
License of the Program or a work on which the Program is based. The
work thus licensed is called the contributor's "contributor version".
A contributor's "essential patent claims" are all patent claims
owned or controlled by the contributor, whether already acquired or
hereafter acquired, that would be infringed by some manner, permitted
by this License, of making, using, or selling its contributor version,
but do not include claims that would be infringed only as a
consequence of further modification of the contributor version. For
purposes of this definition, "control" includes the right to grant
patent sublicenses in a manner consistent with the requirements of
this License.
Each contributor grants you a non-exclusive, worldwide, royalty-free
patent license under the contributor's essential patent claims, to
make, use, sell, offer for sale, import and otherwise run, modify and
propagate the contents of its contributor version.
In the following three paragraphs, a "patent license" is any express
agreement or commitment, however denominated, not to enforce a patent
(such as an express permission to practice a patent or covenant not to
sue for patent infringement). To "grant" such a patent license to a
party means to make such an agreement or commitment not to enforce a
patent against the party.
If you convey a covered work, knowingly relying on a patent license,
and the Corresponding Source of the work is not available for anyone
to copy, free of charge and under the terms of this License, through a
publicly available network server or other readily accessible means,
then you must either (1) cause the Corresponding Source to be so
available, or (2) arrange to deprive yourself of the benefit of the
patent license for this particular work, or (3) arrange, in a manner
consistent with the requirements of this License, to extend the patent
license to downstream recipients. "Knowingly relying" means you have
actual knowledge that, but for the patent license, your conveying the
covered work in a country, or your recipient's use of the covered work
in a country, would infringe one or more identifiable patents in that
country that you have reason to believe are valid.
If, pursuant to or in connection with a single transaction or
arrangement, you convey, or propagate by procuring conveyance of, a
covered work, and grant a patent license to some of the parties
receiving the covered work authorizing them to use, propagate, modify
or convey a specific copy of the covered work, then the patent license
you grant is automatically extended to all recipients of the covered
work and works based on it.
A patent license is "discriminatory" if it does not include within
the scope of its coverage, prohibits the exercise of, or is
conditioned on the non-exercise of one or more of the rights that are
specifically granted under this License. You may not convey a covered
work if you are a party to an arrangement with a third party that is
in the business of distributing software, under which you make payment
to the third party based on the extent of your activity of conveying
the work, and under which the third party grants, to any of the
parties who would receive the covered work from you, a discriminatory
patent license (a) in connection with copies of the covered work
conveyed by you (or copies made from those copies), or (b) primarily
for and in connection with specific products or compilations that
contain the covered work, unless you entered into that arrangement,
or that patent license was granted, prior to 28 March 2007.
Nothing in this License shall be construed as excluding or limiting
any implied license or other defenses to infringement that may
otherwise be available to you under applicable patent law.
12. No Surrender of Others' Freedom.
If conditions are imposed on you (whether by court order, agreement or
otherwise) that contradict the conditions of this License, they do not
excuse you from the conditions of this License. If you cannot convey a
covered work so as to satisfy simultaneously your obligations under this
License and any other pertinent obligations, then as a consequence you may
not convey it at all. For example, if you agree to terms that obligate you
to collect a royalty for further conveying from those to whom you convey
the Program, the only way you could satisfy both those terms and this
License would be to refrain entirely from conveying the Program.
13. Use with the GNU Affero General Public License.
Notwithstanding any other provision of this License, you have
permission to link or combine any covered work with a work licensed
under version 3 of the GNU Affero General Public License into a single
combined work, and to convey the resulting work. The terms of this
License will continue to apply to the part which is the covered work,
but the special requirements of the GNU Affero General Public License,
section 13, concerning interaction through a network will apply to the
combination as such.
14. Revised Versions of this License.
The Free Software Foundation may publish revised and/or new versions of
the GNU General Public License from time to time. Such new versions will
be similar in spirit to the present version, but may differ in detail to
address new problems or concerns.
Each version is given a distinguishing version number. If the
Program specifies that a certain numbered version of the GNU General
Public License "or any later version" applies to it, you have the
option of following the terms and conditions either of that numbered
version or of any later version published by the Free Software
Foundation. If the Program does not specify a version number of the
GNU General Public License, you may choose any version ever published
by the Free Software Foundation.
If the Program specifies that a proxy can decide which future
versions of the GNU General Public License can be used, that proxy's
public statement of acceptance of a version permanently authorizes you
to choose that version for the Program.
Later license versions may give you additional or different
permissions. However, no additional obligations are imposed on any
author or copyright holder as a result of your choosing to follow a
later version.
15. Disclaimer of Warranty.
THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
16. Limitation of Liability.
IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
SUCH DAMAGES.
17. Interpretation of Sections 15 and 16.
If the disclaimer of warranty and limitation of liability provided
above cannot be given local legal effect according to their terms,
reviewing courts shall apply local law that most closely approximates
an absolute waiver of all civil liability in connection with the
Program, unless a warranty or assumption of liability accompanies a
copy of the Program in return for a fee.
END OF TERMS AND CONDITIONS
How to Apply These Terms to Your New Programs
If you develop a new program, and you want it to be of the greatest
possible use to the public, the best way to achieve this is to make it
free software which everyone can redistribute and change under these terms.
To do so, attach the following notices to the program. It is safest
to attach them to the start of each source file to most effectively
state the exclusion of warranty; and each file should have at least
the "copyright" line and a pointer to where the full notice is found.
<one line to give the program's name and a brief idea of what it does.>
Copyright (C) <year> <name of author>
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <https://www.gnu.org/licenses/>.
Also add information on how to contact you by electronic and paper mail.
If the program does terminal interaction, make it output a short
notice like this when it starts in an interactive mode:
<program> Copyright (C) <year> <name of author>
This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
This is free software, and you are welcome to redistribute it
under certain conditions; type `show c' for details.
The hypothetical commands `show w' and `show c' should show the appropriate
parts of the General Public License. Of course, your program's commands
might be different; for a GUI interface, you would use an "about box".
You should also get your employer (if you work as a programmer) or school,
if any, to sign a "copyright disclaimer" for the program, if necessary.
For more information on this, and how to apply and follow the GNU GPL, see
<https://www.gnu.org/licenses/>.
The GNU General Public License does not permit incorporating your program
into proprietary programs. If your program is a subroutine library, you
may consider it more useful to permit linking proprietary applications with
the library. If this is what you want to do, use the GNU Lesser General
Public License instead of this License. But first, please read
<https://www.gnu.org/licenses/why-not-lgpl.html>.

megapixels-1.4.3/README.md

# Megapixels
A GTK4 camera application that knows how to deal with the media request API. It uses
OpenGL to debayer the raw sensor data for the preview.

Chat: #megapixels:postmarketos.org on Matrix
## Building
```shell-session
$ meson build
$ cd build
$ ninja
$ sudo ninja install
```
# Config
Megapixels checks multiple locations for its configuration file and uses the first one it finds.
As a first step it reads the first compatible name from the device tree; in the case of a PinePhone
this might be "pine64,pinephone-1.2". That dtname is then used as the filename in the search
path, in this order:
* $XDG_CONFIG_DIR/megapixels/config/$dtname.ini
* ~/.config/megapixels/config/$dtname.ini
* /etc/megapixels/config/$dtname.ini
* /usr/share/megapixels/config/$dtname.ini
The files in /usr/share/megapixels should be the config files distributed in this repository. The other
locations allow the user or the distribution to override the config.
## Config file format
Configuration files are INI format files.
### [device]
This section provides global info. Currently only the `make` and `model` keys exist; they provide metadata that is added to the
generated pictures.
### All other sections
These are the sections describing the sensors.
* `driver=ov5640` the name of the media node that provides the sensor and its /dev/v4l-subdev* node.
* `media-driver=sun6i-csi` the name of the media node that has this camera in it.
* `rotate=90` the rotation angle to make the sensor match the screen
* `mirrored=true` whether the output is mirrored, useful for front-facing cameras
* `colormatrix=` the DNG colormatrix1 attribute as 9 comma-separated floats
* `forwardmatrix=` the DNG forwardmatrix1 attribute as 9 comma-separated floats
* `blacklevel=10` The DNG blacklevel attribute for this camera
* `whitelevel=255` The DNG whitelevel attribute for this camera
* `focallength=3.33` The focal length of the camera, for EXIF
* `cropfactor=10.81` The cropfactor for the sensor in the camera, for EXIF
* `fnumber=3.0` The aperture size of the sensor, for EXIF
The following keys exist in two variants, prefixed with either `capture-` or `preview-`. Both sets
are required: the capture settings are used when a picture is taken, whereas the preview settings are used
when previewing.
* `width=640` and `height=480` the resolution to use for the sensor
* `rate=15` the refresh rate in fps to use for the sensor
* `fmt=BGGR8` sets the pixel and bus formats used when capturing from the sensor; only BGGR8 is fully supported
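
Putting the keys above together, a configuration could look roughly like the sketch below. The `[rear]` section name and
every value are illustrative assumptions, not a calibration shipped with Megapixels:

```ini
# Hypothetical example, e.g. /usr/share/megapixels/config/pine64,pinephone-1.2.ini;
# the section name and all values below are illustrative, not a shipped calibration.
[device]
make=PINE64
model=PinePhone

[rear]
driver=ov5640
media-driver=sun6i-csi
rotate=90
mirrored=true
blacklevel=10
whitelevel=255
focallength=3.33
cropfactor=10.81
fnumber=3.0
# colormatrix= and forwardmatrix= would go here as 9 comma-separated floats
capture-width=640
capture-height=480
capture-rate=15
capture-fmt=BGGR8
preview-width=640
preview-height=480
preview-rate=15
preview-fmt=BGGR8
```
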
# Post processing
Megapixels only captures raw frames and stores .dng files. It captures a 5-frame burst and saves it to a temporary
location. Then the post-processing script is run, which generates the final .jpg file and writes it into the
pictures directory. Megapixels looks for the post-processing script in the following locations:
* ./postprocess.sh
* $XDG_CONFIG_DIR/megapixels/postprocess.sh
* ~/.config/megapixels/postprocess.sh
* /etc/megapixels/postprocess.sh
* /usr/share/megapixels/postprocess.sh
The bundled postprocess.sh script will copy the first frame of the burst into the picture directory as a DNG
file, and if dcraw and ImageMagick are installed it will also generate a JPG and write that to the picture
directory. It supports either the full dcraw or dcraw_emu from libraw.
It is possible to write your own post-processing pipeline by providing your own `postprocess.sh` script at
one of the above locations. The first argument to the script is the directory containing the temporary
burst files and the second argument is the final path for the image without an extension. For more details
see postprocess.sh in this repository.
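
As a rough sketch of that interface, a bare-bones replacement script could look like the one below. Only the two
arguments are defined above; the burst frames being plain .dng files in the first directory is an assumption here:

```sh
#!/bin/sh
# Bare-bones sketch of a custom postprocess.sh.
# $1 = directory holding the temporary burst frames, $2 = output path without extension.
burst_dir="$1"
target_name="$2"

# Keep the first raw frame of the burst as the final picture; a real pipeline
# would run dcraw/ImageMagick (or anything else) here instead.
for frame in "$burst_dir"/*.dng; do
        cp "$frame" "$target_name.dng"
        break
done
```
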
# Developing
Megapixels is developed at: https://gitlab.com/postmarketOS/megapixels
## Source code organization
* `ini.c` contains an INI file format parser.
* `camera_config.c` describes how cameras are configured. Contains no state.
* `main.c` contains the entry point and UI portion of the application.
* `quickpreview.c` implements fast preview functionality, including debayering, color correction, rotation, etc.
* `io_pipeline.c` implements all IO interaction with V4L2 devices in a separate thread to prevent blocking.
* `process_pipeline.c` implements all processing done on captured images, including launching post-processing
* `pipeline.c` a generic threaded message-passing implementation based on glib, used to implement the pipelines.
* `camera.c` V4L2 abstraction layer to make working with cameras easier
* `device.c` V4L2 abstraction layer for devices
The primary image pipeline consists of the main application, the IO pipeline and
the process pipeline. The main application sends commands to the IO pipeline,
which in turn talks to the process pipeline, which then talks to the main
application. This way neither IO nor processing blocks the main application and
races are generally avoided.
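The message passing machinery itself is not shown in this document, but the general pattern used by `pipeline.c` is a worker thread draining a queue of commands. Below is a minimal sketch of that pattern using plain GLib primitives; the `Message` type and function names are made up for illustration and this is not the actual `pipeline.c` API.
```c
#include <glib.h>

/* Hypothetical message type, only for this illustration */
typedef struct {
	int command;
} Message;

static gpointer
worker(gpointer data)
{
	GAsyncQueue *queue = data;
	while (TRUE) {
		/* Blocks until the main thread pushes something */
		Message *msg = g_async_queue_pop(queue);
		gboolean quit = msg->command < 0;
		/* ... handle the command without blocking the UI ... */
		g_free(msg);
		if (quit)
			break;
	}
	return NULL;
}

int
main(void)
{
	GAsyncQueue *queue = g_async_queue_new();
	GThread *thread = g_thread_new("io", worker, queue);

	/* Sending a command never blocks the caller */
	Message *msg = g_new0(Message, 1);
	msg->command = 1;
	g_async_queue_push(queue, msg);

	/* A negative command is used as a stop signal in this sketch */
	Message *quit = g_new0(Message, 1);
	quit->command = -1;
	g_async_queue_push(queue, quit);

	g_thread_join(thread);
	g_async_queue_unref(queue);
	return 0;
}
```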
Tests are located in `tests/`.
## Tools
All tools are contained in `tools/`
* `list_devices` lists all V4L2 devices and their hardware layout
* `camera_test` lists controls and video modes of a specific camera and tests capturing data from it
## Linux video subsystem
Most of the logic is contained inside `main.c`, but before we look at it, it is
convenient to have some basic notions about the Linux video subsystem that
Megapixels directly uses (instead of, for example, using a higher level
framework such as "gstreamer", as other camera apps do).
Typically, for "simple" video capture devices (such as some old webcams on a
PC), the Linux kernel creates an entry on `/dev/` called `/dev/videoX` (where X
can be `0`, `1`, ...). The user can then `open()` that file descriptor, use
standard `ioctl()`s on it to start/stop/configure the hardware and finally
`read()` from it to obtain individual video frames.
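For such a simple device the whole flow can be sketched in a few lines. This is only an illustration of the conceptual model described above; many drivers do not implement `read()` at all and require the streaming API described further below.
```c
#include <fcntl.h>
#include <linux/videodev2.h>
#include <stdio.h>
#include <string.h>
#include <sys/ioctl.h>
#include <unistd.h>

int
main(void)
{
	int fd = open("/dev/video0", O_RDWR);
	if (fd < 0) {
		perror("open");
		return 1;
	}

	/* Ask the driver what it can do */
	struct v4l2_capability cap;
	memset(&cap, 0, sizeof(cap));
	if (ioctl(fd, VIDIOC_QUERYCAP, &cap) == -1) {
		perror("VIDIOC_QUERYCAP");
		return 1;
	}
	printf("driver: %s, card: %s\n",
	       (const char *)cap.driver, (const char *)cap.card);

	/* Only drivers advertising V4L2_CAP_READWRITE support plain read() */
	if (cap.capabilities & V4L2_CAP_READWRITE) {
		char buf[64 * 1024];
		ssize_t n = read(fd, buf, sizeof(buf));
		printf("read %zd bytes of frame data\n", n);
	}

	close(fd);
	return 0;
}
```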
In the PinePhone we have two cameras ("front" and "rear") but, surprisingly,
the Linux kernel does not expose two video devices but just a single one named
`/dev/video1`.
This is because, on the PinePhone, there is one single "capture device" and two
"image sensors" (one for each camera) attached to it:
```
.-----------. .--------------.
| |---------| front sensor ))))))
| Sensors | '--------------'
| interface | .--------------.
| |---------| rear sensor ))))))
'-----------' '--------------'
```
The only video device exposed (`/dev/video1`) represents the "sensors interface"
block, which can be configured at runtime to capture data from one sensor or the
other.
But there is more: in order to configure the properties of each sensor (example:
capture frame rate, auto exposure, ...), instead of issuing `ioctl()` calls on
`/dev/video1`, the Linux kernel (for this particular case) exposes two extra
devices (`/dev/v4l-subdev0` for one sensor and `/dev/v4l-subdev1` for the other
one).
How does the user know that `/dev/v4l-subdev0`, `/dev/v4l-subdev1` and
`/dev/video1` are related? Thanks to the "media subsystem": for "complex" cases
such as this one, the Linux kernel exposes an extra device (`/dev/mediaX`, where
X can be `0`, `1`, ...) that can be used to...
* Obtain the list of related devices to that "media interface".
* Link/unlink the different "blocks" at runtime.
Pheeew.... let's recap what we have so far:
* `/dev/mediaW` represents the "whole camera hardware"
* `/dev/videoX` is the "sensors interface" from where we will `read()` frames.
* `/dev/v4l-subdevY` and `/dev/v4l-subdevZ` can be used to configure the
sensors.
Notice how I used `W`, `X`, `Y` and `Z` instead of numbers. In the current
kernel `W==1`, `X==1`, `Y==0` and `Z==1`, but that might change in the future.
That's why `main()` needs to figure them out by following this procedure (a
minimal sketch of the ioctl calls involved follows the list):
1. List all `/dev/mediaX` devices present (ex: `/dev/media0`, `/dev/media1`,
...)
2. Query each of them with `ioctl(MEDIA_IOC_DEVICE_INFO)` until we find the
entry managed by a driver named "sun6i-csi" (as that is the name of the
driver of the [camera sensor interface](https://linux-sunxi.org/CSI) in the
Allwinner SoC that the PinePhone uses, which is provided in the `*.ini`
file).
3. Obtain a list of elements associated to that "media device" by calling
`ioctl(MEDIA_IOC_ENUM_ENTITIES)`.
4. The entry called "ov5640" is the rear camera (as that is the name of the
driver of the rear sensor, which is provided on the `*.ini` file). Save its
device name (ex: `/dev/v4l-subdev1`) for later.
5. The entry called "gc2145" is the front camera (as that is the name of the
driver of the front sensor, which is provided on the `*.ini` file). Save its
device name (ex: `/dev/v4l-subdev0`) for later.
6. The entry called "sun6i-csi" is the sensors interface (same name as the
driver in charge of the `/dev/mediaX` interface). Save its device name (ex:
`/dev/video1`) for later.
By the way, regarding steps 1 and 2, you can manually inspect the list of
"elements" that are related to a given `/dev/mediaX` entry from user space using
the `media-ctl` tool. This is what the current kernel and hardware revision
return:
```shell-session
$ media-ctl -d /dev/media1 -p
Media controller API version 5.7.19
Media device information
------------------------
driver sun6i-csi
model Allwinner Video Capture Device
serial
bus info
hw revision 0x0
driver version 5.7.19
Device topology
- entity 1: sun6i-csi (1 pad, 2 links)
type Node subtype V4L flags 0
device node name /dev/video1
pad0: Sink
<- "gc2145 4-003c":0 []
<- "ov5640 4-004c":0 [ENABLED]
- entity 5: gc2145 4-003c (1 pad, 1 link)
type V4L2 subdev subtype Sensor flags 0
device node name /dev/v4l-subdev0
pad0: Source
[fmt:YUYV8_2X8/1280x720@1/10 field:none colorspace:srgb]
-> "sun6i-csi":0 []
- entity 7: ov5640 4-004c (1 pad, 1 link)
type V4L2 subdev subtype Sensor flags 0
device node name /dev/v4l-subdev1
pad0: Source
[fmt:YUYV8_2X8/1280x720@1/30 colorspace:srgb xfer:srgb ycbcr:601 quantization:full-range]
-> "sun6i-csi":0 [ENABLED]
```
...which means what we already know: `sun6i-csi` is the sensors interface sink
(on `/dev/video1`) to which the two sensors (`gc2145` on `/dev/v4l-subdev0` and
`ov5640` on `/dev/v4l-subdev1`) are connected. By default (or, at least, in the
example above) the sensors interface is connected to the rear camera (`ov5640`),
as its link is the only one "ENABLED".
Anyway... once `main()` has figured out the values of `W`, `X`, `Y` and `Z`,
this is how all these device entries are used to manage the camera hardware:
* Use `ioctl(MEDIA_IOC_SETUP_LINK)` on the `/dev/mediaW` entry to "link" the
sensors interface with either the rear sensor or the front sensor (this is
how we choose from which camera we will be capturing frames; a small sketch
of this call follows the list)
* Use `ioctl(VIDIOC_SUBDEV_...)` on `/dev/v4l-subdev{Y,Z}` to configure the
sensors.
* Use `ioctl(VIDIOC_...)` on `/dev/videoX` to configure the sensors interface.
* Use `read()` on `/dev/videoX` to capture frames.
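For the first of those points, switching the active sensor boils down to a single `MEDIA_IOC_SETUP_LINK` call on the media device. A hedged sketch (the entity ids would come from the enumeration shown earlier; this is not the exact Megapixels code):
```c
#include <linux/media.h>
#include <string.h>
#include <sys/ioctl.h>

/* Enable (or disable) the link between a sensor's source pad and the
 * sink pad of the sensors interface. `media_fd` is an open /dev/mediaW. */
static int
set_sensor_link(int media_fd, unsigned int sensor_entity,
		unsigned int csi_entity, int enable)
{
	struct media_link_desc link;
	memset(&link, 0, sizeof(link));

	link.source.entity = sensor_entity; /* e.g. the "ov5640 4-004c" entity */
	link.source.index = 0;              /* the sensor's only (source) pad */
	link.sink.entity = csi_entity;      /* the "sun6i-csi" entity */
	link.sink.index = 0;                /* its sink pad */
	link.flags = enable ? MEDIA_LNK_FL_ENABLED : 0;

	return ioctl(media_fd, MEDIA_IOC_SETUP_LINK, &link);
}
```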
The mechanism described in that last point (ie. using `read()` to capture frames),
while possible, is not actually what `main()` does. Instead, a more complex
mechanism (described
[here](https://www.kernel.org/doc/html/latest/userspace-api/media/v4l/io.html))
is used, where a series of buffers are allocated, sent to `/dev/videoX` with
`ioctl(VIDIOC_QBUF)` and then retrieved with `ioctl(VIDIOC_DQBUF)` once they
have been filled with video frames (after having called
`ioctl(VIDIOC_STREAMON)`)... but it is basically the same as performing a
`read()` (except that it has more flexibility).
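Condensed to its essence (one buffer, no error handling, and certainly not the code Megapixels really uses), that streaming mechanism looks like this:
```c
#include <linux/videodev2.h>
#include <string.h>
#include <sys/ioctl.h>
#include <sys/mman.h>

/* `fd` is an open /dev/videoX that already has its format configured.
 * Returns a pointer to one captured frame and its size in *out_length. */
static void *
capture_one_frame(int fd, size_t *out_length)
{
	/* 1. Ask the driver for buffers we can mmap() */
	struct v4l2_requestbuffers req;
	memset(&req, 0, sizeof(req));
	req.count = 1;
	req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
	req.memory = V4L2_MEMORY_MMAP;
	ioctl(fd, VIDIOC_REQBUFS, &req);

	/* 2. Find out where the buffer lives and map it into our process */
	struct v4l2_buffer buf;
	memset(&buf, 0, sizeof(buf));
	buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
	buf.memory = V4L2_MEMORY_MMAP;
	buf.index = 0;
	ioctl(fd, VIDIOC_QUERYBUF, &buf);
	void *data = mmap(NULL, buf.length, PROT_READ, MAP_SHARED, fd,
			  buf.m.offset);

	/* 3. Queue the buffer and start streaming */
	ioctl(fd, VIDIOC_QBUF, &buf);
	enum v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
	ioctl(fd, VIDIOC_STREAMON, &type);

	/* 4. Dequeue it once the hardware has filled it with a frame;
	 * this blocks, just like a read() would */
	ioctl(fd, VIDIOC_DQBUF, &buf);

	*out_length = buf.bytesused;
	return data; /* re-queue with VIDIOC_QBUF to keep the stream going */
}
```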
## Source code walkthrough
As we have just seen in the [previous section](#linux-video-subsystem), in the
current kernel version, and for the latest PinePhone revision (1.2a), the Linux
kernel exposes 4 device entries to manage the camera hardware:
* `/dev/media1` to select the active camera ("front" or "rear")
* `/dev/v4l-subdev0` and `/dev/v4l-subdev1` to configure the sensor of each
camera (aperture, auto exposure, etc...)
* `/dev/video1` to capture frames (video stream and/or pictures)
However these device entries might change with future versions of the kernel
and/or the hardware (for example, `/dev/video3` instead of `/dev/video1`), and
that's why function `main()` in `main.c` starts by trying to figure out the
correct names.
It does so by checking the hardware revision in `/proc/device-tree/compatible`
and then opening the corresponding `.ini` file from the config folder (ex:
`pine64,pinephone-1.2.ini` for the latest PinePhone revision as of today,
`pine64,pinetab.ini` for the PineTab, etc...).
The `.ini` file contains the names of the drivers involved (the `media-driver`
and `driver` keys in each sensor section) and, from there, `main()` can figure
out the rest of the device names as already explained in the [previous
section](#linux-video-subsystem).
```
/proc/device-tree/compatible
|
|
V
config/*.ini ---------------.
| |
| V
| .~~~~~~~~~~~~~~~~~~~~~~~~~~~~
| : :
| : .----> /dev/video1 :
V : | :
/dev/media1 ------+----> /dev/v4l-subdev0 :
: | :
: '----> /dev/v4l-subdev1 :
: :
'~~~~~~~~~~~~~~~~~~~~~~~~~~~~
```
Anyway... in addition to figuring out these entry names, `main()` also prepares
the GTK widgets layout and installs a series of callbacks. Among them we find
these two:
1. One on the "switch camera button" (`on_camera_switch_clicked()`) which uses
`/dev/media1` to switch between the front and rear cameras.
Every time this happens, the sensors and the sensors interface are
reconfigured according to the parameters provided in the `.ini` file using
`/dev/video1`, `/dev/v4l-subdev0` and `/dev/v4l-subdev1`.
```
on_camera_switch_clicked()
|
|--> stop_capturing()
| `--> ioctl('/dev/video1', ...) # Stop processing frames
|
|--> setup_front() or setup_rear()
| |--> ioctl('/dev/media1', ...)
| `--> init_sensor()
| `--> ioctl('/dev/v4l-subdev{0,1}') # Reconfigure sensor
|
|--> init_device()
| `--> ioctl('/dev/video1') # Reconfigure sensors interface
|
`--> start_capturing()
`--> ioctl('/dev/video1') # Resume capturing frames
```
2. Another one on the "take a photo button" (`on_shutter_clicked()`) which
will use `/dev/v4l-subdev{0,1}` to disable hardware "auto gain" and "auto
exposure" and initiate the "single frame capture process" (described later).
A minimal sketch of those two control calls follows right after this list.
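Disabling those two controls amounts to a couple of `VIDIOC_S_CTRL` calls on the sensor's subdev node. A minimal sketch (the real code also reads the resulting gain and exposure values back afterwards, as described below):
```c
#include <linux/videodev2.h>
#include <sys/ioctl.h>

/* `subdev_fd` is an open /dev/v4l-subdev{0,1} for the active sensor */
static void
disable_auto_controls(int subdev_fd)
{
	struct v4l2_control ctrl;

	/* Turn hardware auto-gain off */
	ctrl.id = V4L2_CID_AUTOGAIN;
	ctrl.value = 0;
	ioctl(subdev_fd, VIDIOC_S_CTRL, &ctrl);

	/* Switch auto-exposure to fully manual */
	ctrl.id = V4L2_CID_EXPOSURE_AUTO;
	ctrl.value = V4L2_EXPOSURE_MANUAL;
	ioctl(subdev_fd, VIDIOC_S_CTRL, &ctrl);
}
```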
Finally, before calling GTK's main loop, `main()` installs another function
(`get_frame()`) on the "nothing else to do" GTK slot. It will thus be called
continuously as long as there are no other GTK events queued (ie. almost always).
This `get_frame()` function is where the magic happens: it will call
`read_frame()` to `read()` an image frame from the `/dev/video1` device and
then call `process_image()` to process it.
> NOTE: As explained at the end of the [Linux video subsystem
> section](#linux-video-subsystem), it is a bit more complex than that (that's
> why you will find a `ioctl()` instead of a `read()` inside `read_frame()`),
> but for all purposes, you can ignore this fact.
So... let's recap: as long as the user does not click on any application button,
the `process_image()` function is being called all the time with a pointer to
the latest captured frame. What does it do with it?
The captured frame buffer contains "RAW data", whose format depends on the value
specified on the `.ini` file for each sensor. Right now we are using `BGGR8` for
both of them, so the function that takes this buffer to process it is always the
same (`quick_debayer_bggr8()`). The result is a buffer of "standard pixels" that
can be drawn to screen using GTK/cairo functions.
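The principle behind that debayering step, reduced to its simplest possible form (this is only an illustration of the idea, not the actual `quick_debayer_bggr8()`): every 2x2 block of BGGR8 data contains one blue, two green and one red sample, which can be collapsed into a single RGB output pixel.
```c
#include <stdint.h>

/* Naive BGGR8 debayer: each 2x2 raw block becomes one RGB pixel, so the
 * output is half the width and half the height of the input. */
static void
naive_debayer_bggr8(const uint8_t *raw, uint32_t width, uint32_t height,
		    uint8_t *rgb_out)
{
	for (uint32_t y = 0; y < height; y += 2) {
		for (uint32_t x = 0; x < width; x += 2) {
			uint8_t b = raw[y * width + x];
			uint8_t g1 = raw[y * width + x + 1];
			uint8_t g2 = raw[(y + 1) * width + x];
			uint8_t r = raw[(y + 1) * width + x + 1];

			uint8_t *out = rgb_out +
				((y / 2) * (width / 2) + x / 2) * 3;
			out[0] = r;
			out[1] = (uint8_t)(((unsigned)g1 + g2) / 2);
			out[2] = b;
		}
	}
}
```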
When the user clicks on the "take a photo button", however, a special global
variable (`capture`) is set so that the next `N` times (currently `N==10`), the
`process_image()` will do something different:
1. It will first retrieve the latest "auto gain" and "auto exposure" values
(remember they were disabled when the user clicked on the "take a photo
button").
2. It will save the latest captured buffer (in "RAW data" format, ie. `BGGR8`)
to a `.dng` file using the "TIFF" library, which makes it possible to attach
all the needed metadata (which Megapixels extracts from the hardware itself
and/or the values in the `.ini` file). A simplified libtiff sketch can be found
at the end of this section.
3. In addition, **only** the very last time (of the `N` times):
- The captured buffer is run through `quick_debayer_bggr8()` and the result
printed to the UI.
- The `postprocess.sh` script (see the [Post processing
section](#post-processing)) is called with two arguments: the path to the
`/tmp` folder where the `N` `.dng` images have been saved and the path
and filename where the resulting post-processed (typically JPEG) image
should be saved to (as a result of running `postprocess.sh`)
- "Auto exposure" and "auto gain" are re-enabled.
In other words: every time the user clicks on the "take a photo button", `N`
RAW images are saved and `postprocess.sh` called, which is expected to take
those `N` images and generate a final JPEG.
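As promised in point 2 above, here is what the libtiff part boils down to. Writing a real `.dng` additionally requires the CFA/DNG-specific tags (CFA pattern, color matrices, black/white level, EXIF data), which are out of scope here; the sketch below only shows the basic pattern of dumping an 8-bit raw buffer row by row and is not Megapixels' actual DNG writer:
```c
#include <stdint.h>
#include <tiffio.h>

/* Write `raw` (8 bits per sample, one sample per pixel) as a plain
 * grayscale TIFF. A real DNG would use PHOTOMETRIC_CFA plus the DNG
 * metadata tags instead. */
static int
write_raw_as_tiff(const char *path, const uint8_t *raw,
		  uint32_t width, uint32_t height)
{
	TIFF *tif = TIFFOpen(path, "w");
	if (!tif)
		return 0;

	TIFFSetField(tif, TIFFTAG_IMAGEWIDTH, width);
	TIFFSetField(tif, TIFFTAG_IMAGELENGTH, height);
	TIFFSetField(tif, TIFFTAG_BITSPERSAMPLE, 8);
	TIFFSetField(tif, TIFFTAG_SAMPLESPERPIXEL, 1);
	TIFFSetField(tif, TIFFTAG_PHOTOMETRIC, PHOTOMETRIC_MINISBLACK);
	TIFFSetField(tif, TIFFTAG_ROWSPERSTRIP, 1);

	for (uint32_t row = 0; row < height; ++row) {
		/* TIFFWriteScanline takes a non-const buffer pointer */
		if (TIFFWriteScanline(tif, (void *)(raw + (size_t)row * width),
				      row, 0) == -1) {
			TIFFClose(tif);
			return 0;
		}
	}

	TIFFClose(tif);
	return 1;
}
```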
megapixels-1.4.3/clang-format.sh 0000775 0000000 0000000 00000000066 14155633267 0016604 0 ustar 00root root 0000000 0000000 #!/bin/sh
cd "$MESON_SOURCE_ROOT"
clang-format-12 "$@"
megapixels-1.4.3/config/ 0000775 0000000 0000000 00000000000 14155633267 0015136 5 ustar 00root root 0000000 0000000 megapixels-1.4.3/config/motorola,osprey.ini 0000664 0000000 0000000 00000000522 14155633267 0021010 0 ustar 00root root 0000000 0000000 [device]
make=Motorola
model=Osprey
[rear]
driver=imx214
media-driver=qcom-camss
capture-width=4096
capture-height=2304
capture-rate=30
capture-fmt=RGGB10P
preview-width=4096
preview-height=2304
preview-rate=30
preview-fmt=RGGB10P
rotate=270
media-links=msm_csiphy0:1->msm_csid0:0,msm_csid0:1->msm_ispif0:0,msm_ispif0:1->msm_vfe0_rdi0:0
megapixels-1.4.3/config/pine64,pinephone-1.0.ini 0000664 0000000 0000000 00000001450 14155633267 0021222 0 ustar 00root root 0000000 0000000 [device]
make=PINE64
model=PinePhone
[rear]
driver=ov5640
media-driver=sun6i-csi
capture-width=2592
capture-height=1944
capture-rate=10
capture-fmt=BGGR8
preview-width=1280
preview-height=720
preview-rate=30
preview-fmt=BGGR8
rotate=270
colormatrix=1.384,-0.3203,-0.0124,-0.2728,1.049,0.1556,-0.0506,0.2577,0.8050
forwardmatrix=0.7331,0.1294,0.1018,0.3039,0.6698,0.0263,0.0002,0.0556,0.7693
blacklevel=3
whitelevel=255
focallength=3.33
cropfactor=10.81
fnumber=3.0
iso-min=100
iso-max=64000
flash-path=/sys/class/leds/white:flash
[front]
driver=gc2145
media-driver=sun6i-csi
capture-width=1280
capture-height=960
capture-rate=60
capture-fmt=BGGR8
preview-width=1280
preview-height=960
preview-rate=60
preview-fmt=BGGR8
rotate=90
mirrored=true
focallength=2.6
cropfactor=12.7
fnumber=2.8
flash-display=true
megapixels-1.4.3/config/pine64,pinephone-1.1.ini 0000664 0000000 0000000 00000001450 14155633267 0021223 0 ustar 00root root 0000000 0000000 [device]
make=PINE64
model=PinePhone
[rear]
driver=ov5640
media-driver=sun6i-csi
capture-width=2592
capture-height=1944
capture-rate=10
capture-fmt=BGGR8
preview-width=1280
preview-height=720
preview-rate=30
preview-fmt=BGGR8
rotate=270
colormatrix=1.384,-0.3203,-0.0124,-0.2728,1.049,0.1556,-0.0506,0.2577,0.8050
forwardmatrix=0.7331,0.1294,0.1018,0.3039,0.6698,0.0263,0.0002,0.0556,0.7693
blacklevel=3
whitelevel=255
focallength=3.33
cropfactor=10.81
fnumber=3.0
iso-min=100
iso-max=64000
flash-path=/sys/class/leds/white:flash
[front]
driver=gc2145
media-driver=sun6i-csi
capture-width=1280
capture-height=960
capture-rate=60
capture-fmt=BGGR8
preview-width=1280
preview-height=960
preview-rate=60
preview-fmt=BGGR8
rotate=90
mirrored=true
focallength=2.6
cropfactor=12.7
fnumber=2.8
flash-display=true
megapixels-1.4.3/config/pine64,pinephone-1.2.ini 0000664 0000000 0000000 00000001467 14155633267 0021234 0 ustar 00root root 0000000 0000000 [device]
make=PINE64
model=PinePhone
[rear]
driver=ov5640
media-driver=sun6i-csi
capture-width=2592
capture-height=1944
capture-rate=10
capture-fmt=BGGR8
preview-width=1280
preview-height=720
preview-rate=30
preview-fmt=BGGR8
rotate=270
mirrored=false
colormatrix=1.384,-0.3203,-0.0124,-0.2728,1.049,0.1556,-0.0506,0.2577,0.8050
forwardmatrix=0.7331,0.1294,0.1018,0.3039,0.6698,0.0263,0.0002,0.0556,0.7693
blacklevel=3
whitelevel=255
focallength=3.33
cropfactor=10.81
fnumber=3.0
iso-min=100
iso-max=64000
flash-path=/sys/class/leds/white:flash
[front]
driver=gc2145
media-driver=sun6i-csi
capture-width=1280
capture-height=960
capture-rate=60
capture-fmt=BGGR8
preview-width=1280
preview-height=960
preview-rate=60
preview-fmt=BGGR8
rotate=90
mirrored=true
focallength=2.6
cropfactor=12.7
fnumber=2.8
flash-display=true
megapixels-1.4.3/config/pine64,pinetab.ini 0000664 0000000 0000000 00000001356 14155633267 0020370 0 ustar 00root root 0000000 0000000 [device]
make=PINE64
model=PineTab
[rear]
driver=ov5640
media-driver=sun6i-csi
capture-width=2592
capture-height=1944
capture-rate=10
capture-fmt=BGGR8
preview-width=1280
preview-height=720
preview-rate=20
preview-fmt=BGGR8
rotate=270
colormatrix=1.384,-0.3203,-0.0124,-0.2728,1.049,0.1556,-0.0506,0.2577,0.8050
forwardmatrix=0.7331,0.1294,0.1018,0.3039,0.6698,0.0263,0.0002,0.0556,0.7693
blacklevel=3
whitelevel=255
focallength=3.33
cropfactor=10.81
fnumber=3.0
iso-min=100
iso-max=64000
[front]
driver=gc2145
media-driver=sun6i-csi
capture-width=1280
capture-height=960
capture-rate=30
capture-fmt=BGGR8
preview-width=1280
preview-height=960
preview-rate=30
preview-fmt=BGGR8
rotate=90
mirrored=true
focallength=2.6
cropfactor=12.7
fnumber=2.8
megapixels-1.4.3/data/ 0000775 0000000 0000000 00000000000 14155633267 0014602 5 ustar 00root root 0000000 0000000 megapixels-1.4.3/data/blit.frag 0000664 0000000 0000000 00000000250 14155633267 0016372 0 ustar 00root root 0000000 0000000 #ifdef GL_ES
precision mediump float;
#endif
uniform sampler2D texture;
varying vec2 uv;
void
main()
{
gl_FragColor = vec4(texture2D(texture, uv).rgb, 1);
}
megapixels-1.4.3/data/blit.vert 0000664 0000000 0000000 00000000354 14155633267 0016440 0 ustar 00root root 0000000 0000000 #ifdef GL_ES
precision mediump float;
#endif
attribute vec2 vert;
attribute vec2 tex_coord;
uniform mat3 transform;
varying vec2 uv;
void
main()
{
uv = tex_coord;
gl_Position = vec4(transform * vec3(vert, 1), 1);
}
megapixels-1.4.3/data/camera.css 0000664 0000000 0000000 00000000354 14155633267 0016546 0 ustar 00root root 0000000 0000000 .errorbox {
background: #dd0000;
color: #ffffff;
}
.controlbox {
background: rgba(0, 0, 0, 0.2);
}
.button-overlay {
opacity: 0.9;
background-color: rgba(0, 0, 0, 0.2);
}
.flash {
background-color: #ffffff;
}
megapixels-1.4.3/data/camera.ui 0000664 0000000 0000000 00000032210 14155633267 0016367 0 ustar 00root root 0000000 0000000
megapixels-1.4.3/data/controls-popover.ui 0000664 0000000 0000000 00000002655 14155633267 0020504 0 ustar 00root root 0000000 0000000
vertical15horizontal15horizontal0150centerauto5
megapixels-1.4.3/data/debayer.frag 0000664 0000000 0000000 00000002754 14155633267 0017066 0 ustar 00root root 0000000 0000000 #ifdef GL_ES
precision mediump float;
#endif
uniform sampler2D texture;
uniform mat3 color_matrix;
varying vec2 top_left_uv;
varying vec2 top_right_uv;
varying vec2 bottom_left_uv;
varying vec2 bottom_right_uv;
void
main()
{
// Note the coordinates for texture samples need to be a varying, as the
// Mali-400 has this as a fast path allowing 32-bit floats. Otherwise
// they end up as 16-bit floats and that's not accurate enough.
vec4 samples = vec4(texture2D(texture, top_left_uv).r,
texture2D(texture, top_right_uv).r,
texture2D(texture, bottom_left_uv).r,
texture2D(texture, bottom_right_uv).r);
// Assume BGGR for now. Currently this just takes 3 of the four samples
// for each pixel, there's room here to do some better debayering.
vec3 color = vec3(samples.w, (samples.y + samples.z) / 2.0, samples.x);
// Some crude blacklevel correction to make the preview a bit nicer, this
// should be an uniform
vec3 corrected = color - 0.02;
// Apply the color matrices
// vec3 corrected = color_matrix * color2;
// Fast SRGB estimate. See https://mimosa-pudica.net/fast-gamma/
vec3 srgb_color =
(vec3(1.138) * inversesqrt(corrected) - vec3(0.138)) * corrected;
// Slow SRGB estimate
// vec3 srgb_color = pow(color, vec3(1.0 / 2.2));
gl_FragColor = vec4(srgb_color, 1);
}
megapixels-1.4.3/data/debayer.vert 0000664 0000000 0000000 00000001070 14155633267 0017115 0 ustar 00root root 0000000 0000000 #ifdef GL_ES
precision mediump float;
#endif
attribute vec2 vert;
attribute vec2 tex_coord;
uniform mat3 transform;
uniform vec2 pixel_size;
varying vec2 top_left_uv;
varying vec2 top_right_uv;
varying vec2 bottom_left_uv;
varying vec2 bottom_right_uv;
void
main()
{
top_left_uv = tex_coord - pixel_size / 2.0;
bottom_right_uv = tex_coord + pixel_size / 2.0;
top_right_uv = vec2(top_left_uv.x, bottom_right_uv.y);
bottom_left_uv = vec2(bottom_right_uv.x, top_left_uv.y);
gl_Position = vec4(transform * vec3(vert, 1), 1);
}
megapixels-1.4.3/data/flash-disabled-symbolic.svg 0000664 0000000 0000000 00000002224 14155633267 0022004 0 ustar 00root root 0000000 0000000
megapixels-1.4.3/data/flash-enabled-symbolic.svg 0000664 0000000 0000000 00000001740 14155633267 0021631 0 ustar 00root root 0000000 0000000
megapixels-1.4.3/data/folder-symbolic.svg 0000664 0000000 0000000 00000003001 14155633267 0020407 0 ustar 00root root 0000000 0000000
megapixels-1.4.3/data/meson.build 0000664 0000000 0000000 00000001462 14155633267 0016747 0 ustar 00root root 0000000 0000000 resources = gnome.compile_resources('megapixels-resources',
'org.postmarketos.Megapixels.gresource.xml')
install_data(['org.postmarketos.Megapixels.desktop'],
install_dir: get_option('datadir') / 'applications')
install_data(['org.postmarketos.Megapixels.metainfo.xml'],
install_dir: get_option('datadir') / 'metainfo')
install_data('org.postmarketos.Megapixels.svg',
install_dir: join_paths(get_option('datadir'), 'icons/hicolor/scalable/apps'))
install_data(['postprocess.sh'],
install_dir: get_option('datadir') / 'megapixels/',
install_mode: 'rwxr-xr-x')
settings_schemas = ['org.postmarketos.Megapixels.gschema.xml']
schemas_dir = get_option('datadir') / 'glib-2.0' / 'schemas'
install_data(settings_schemas, install_dir: schemas_dir)
gnome.compile_schemas(depend_files: files(settings_schemas))
megapixels-1.4.3/data/org.postmarketos.Megapixels.desktop 0000664 0000000 0000000 00000000274 14155633267 0023616 0 ustar 00root root 0000000 0000000 [Desktop Entry]
Name=Megapixels
Exec=megapixels
Terminal=false
Type=Application
Categories=GTK;
Icon=org.postmarketos.Megapixels
X-Purism-FormFactor=Workstation;Mobile;
StartupNotify=true
megapixels-1.4.3/data/org.postmarketos.Megapixels.gresource.xml 0000664 0000000 0000000 00000001664 14155633267 0024746 0 ustar 00root root 0000000 0000000
camera.uicontrols-popover.uicamera.cssflash-disabled-symbolic.svgflash-enabled-symbolic.svgfolder-symbolic.svgsettings-symbolic.svgshutter-button-symbolic.svgshutter-symbolic.svgswitch-camera-symbolic.svgblit.vertblit.fragsolid.vertsolid.fragdebayer.vertdebayer.frag
megapixels-1.4.3/data/org.postmarketos.Megapixels.gschema.xml 0000664 0000000 0000000 00000001053 14155633267 0024347 0 ustar 00root root 0000000 0000000
trueDon't throw away the .dng file after post processing
Megapixels will write a .dng file that's passed to the post processing script
if this setting is disabled the post-process script will be told to clean it
up after processing.
megapixels-1.4.3/data/org.postmarketos.Megapixels.metainfo.xml 0000664 0000000 0000000 00000015534 14155633267 0024553 0 ustar 00root root 0000000 0000000
org.postmarketos.MegapixelsCC0-1.0GPL-3.0MegapixelsA gnome camera application for phones
Megapixels is a camera application designed for phones and tablets. It
implements the v4l2 and media-request apis to set up camera pipelines on
ARM hardware and uses the raw data modes of the sensors to get the best
quality pictures.
Improved color handling in the processed pictures
Better wording on the QR dialogs
Redesigned the data QR dialog
Bugfix release
Megapixels now has GPU acceleration with GTK4
Release with some small improvements:
The thumbnail button now has a spinner on it while the photo is post-processing
The hardcoded ~/Pictures path is no longer used
Bugfix release that solves:
libtiff incompatibility because debian broke libtiff
dropped hdr_stacker from the post processing script
Switching a control from auto to manual now syncs the value
Small update to reduce preview latency
Major performance improvement release. Thanks to Benjamin Schaaf this release
uses threading to process frames for the preview and makes more of the app
async, improving the responsiveness a lot.
This release also adds resolution switching when taking a picture so the preview
can run on a lower resolution.
Don't wait for camera to turn on before showing window
Use separate thread for image processing, keeping UI responsive
Apply color calibration in the preview
Run camera at lower resolution while previewing
Added separate tools for camera testing from the command line
This release has a rewrite in the config file parsing and camera handling.
Support 1-5 cameras now instead of the hardcoded 2
Support cameras being in their own video and media node
Debayering now supports 4 pixel orders instead of the hardcoded BGGR
Added Appstream metainfo for graphical package management software
The preview now has the correct gamma curve, making the images brighter so you can see what you're capturing
Fixed some posix compliance issues
This is the UI improvement release. Now it's possible to change exposure settings in the app preview
Added UI for camera controls
Slight image quality improvements on the generated .jpg files
Better support for other releases of dcraw
Fixed bug on locales with a comma as decimal separator
This release rewrites a lot of the image processing pipeline. Megapixels now
only directly outputs .dng files and then triggers an external processing
pipeline to convert that into the desired image formats
Capture a burst of 5 raw frames when pressing the shutter button
Added colorspace metadata to the images
Added exif tags to the generated images
If the sensor supports autofocus then that is triggered on the start of the application
Fixed some memory leaks
This release brings mainly UI improvements
Settings button does slightly more now, but is still useless
Made the shutter button more visible
Use all svg icons in the bottom bar
Added button to open the image storage folder
Added button that opens the latest image
Taking a picture now sets a thumbnail on the latest image button
First distro-packaged release of Megapixels
megapixels-1.4.3/data/org.postmarketos.Megapixels.svg 0000664 0000000 0000000 00000013140 14155633267 0022740 0 ustar 00root root 0000000 0000000
megapixels-1.4.3/data/postprocess.sh 0000775 0000000 0000000 00000005235 14155633267 0017532 0 ustar 00root root 0000000 0000000 #!/bin/sh
# The post-processing script gets called after taking a burst of
# pictures into a temporary directory. The first argument is the
# directory containing the raw files in the burst. The contents
# are 1.dng, 2.dng.... up to the number of photos in the burst.
#
# The second argument is the filename for the final photo without
# the extension, like "/home/user/Pictures/IMG202104031234"
#
# The third argument is 1 or 0 for the cleanup user config. If this
# is 0 the .dng file should not be moved to the output directory
#
# The post-processing script is responsible for cleaning up
# temporary directory for the burst.
set -e
if [ "$#" -ne 3 ]; then
echo "Usage: $0 [burst-dir] [target-name] [save-dng]"
exit 2
fi
BURST_DIR="$1"
TARGET_NAME="$2"
SAVE_DNG="$3"
MAIN_PICTURE="$BURST_DIR"/1
# Copy the first frame of the burst as the raw photo
cp "$BURST_DIR"/1.dng "$TARGET_NAME.dng"
# Create a .jpg if raw processing tools are installed
DCRAW=""
TIFF_EXT="dng.tiff"
if command -v "dcraw_emu" > /dev/null
then
DCRAW=dcraw_emu
# -fbdd 1 Raw denoising with FBDD
set -- -fbdd 1
elif [ -x "/usr/lib/libraw/dcraw_emu" ]; then
DCRAW=/usr/lib/libraw/dcraw_emu
# -fbdd 1 Raw denoising with FBDD
set -- -fbdd 1
elif command -v "dcraw" > /dev/null
then
DCRAW=dcraw
TIFF_EXT="tiff"
set --
fi
CONVERT=""
if command -v "convert" > /dev/null
then
CONVERT="convert"
elif command -v "gm" > /dev/null
then
CONVERT="gm"
fi
if [ -n "$DCRAW" ]; then
# +M use embedded color matrix
# -H 4 Recover highlights by rebuilding them
# -o 1 Output in sRGB colorspace
# -q 3 Debayer with AHD algorithm
# -T Output TIFF
$DCRAW +M -H 4 -o 1 -q 3 -T "$@" "$MAIN_PICTURE.dng"
# If imagemagick is available, convert the tiff to jpeg and apply slight sharpening
if [ -n "$CONVERT" ];
then
if [ "$CONVERT" = "convert" ]; then
convert "$MAIN_PICTURE.$TIFF_EXT" -sharpen 0x1.0 -sigmoidal-contrast 6,50% "$TARGET_NAME.jpg"
else
# sadly sigmoidal contrast is not available in imagemagick
gm convert "$MAIN_PICTURE.$TIFF_EXT" -sharpen 0x1.0 "$TARGET_NAME.jpg"
fi
# If exiftool is installed copy the exif data over from the tiff to the jpeg
# since imagemagick is stupid
if command -v exiftool > /dev/null
then
exiftool -tagsFromfile "$MAIN_PICTURE.$TIFF_EXT" \
-software="Megapixels" \
-overwrite_original "$TARGET_NAME.jpg"
fi
echo "$TARGET_NAME.jpg"
else
cp "$MAIN_PICTURE.$TIFF_EXT" "$TARGET_NAME.tiff"
echo "$TARGET_NAME.tiff"
fi
fi
# Clean up the temp dir containing the burst
rm -rf "$BURST_DIR"
# Clean up the .dng if the user didn't want it
if [ "$SAVE_DNG" -eq "0" ]; then
rm "$TARGET_NAME.dng"
fi
megapixels-1.4.3/data/settings-symbolic.svg 0000664 0000000 0000000 00000003155 14155633267 0021006 0 ustar 00root root 0000000 0000000
megapixels-1.4.3/data/shutter-button-symbolic.svg 0000664 0000000 0000000 00000003055 14155633267 0022154 0 ustar 00root root 0000000 0000000
megapixels-1.4.3/data/shutter-symbolic.svg 0000664 0000000 0000000 00000003543 14155633267 0020645 0 ustar 00root root 0000000 0000000
megapixels-1.4.3/data/solid.frag 0000664 0000000 0000000 00000000161 14155633267 0016553 0 ustar 00root root 0000000 0000000 #ifdef GL_ES
precision mediump float;
#endif
uniform vec4 color;
void
main()
{
gl_FragColor = color;
}
megapixels-1.4.3/data/solid.vert 0000664 0000000 0000000 00000000174 14155633267 0016620 0 ustar 00root root 0000000 0000000 #ifdef GL_ES
precision mediump float;
#endif
attribute vec2 vert;
void
main()
{
gl_Position = vec4(vert, 0, 1);
}
megapixels-1.4.3/data/switch-camera-symbolic.svg 0000664 0000000 0000000 00000005077 14155633267 0021702 0 ustar 00root root 0000000 0000000
megapixels-1.4.3/meson.build 0000664 0000000 0000000 00000006053 14155633267 0016037 0 ustar 00root root 0000000 0000000 project('megapixels', 'c', version: '1.4.3')
gnome = import('gnome')
gtkdep = dependency('gtk4')
tiff = dependency('libtiff-4')
zbar = dependency('zbar')
threads = dependency('threads')
# gl = dependency('gl')
epoxy = dependency('epoxy')
cc = meson.get_compiler('c')
libm = cc.find_library('m', required: false)
subdir('data')
conf = configuration_data()
conf.set_quoted('DATADIR', join_paths(get_option('prefix'), get_option('datadir')))
conf.set_quoted('SYSCONFDIR', get_option('sysconfdir'))
configure_file(
output: 'config.h',
configuration: conf)
add_global_arguments('-DVERSION="@0@"'.format(meson.project_version()), language: 'c')
# Define DEBUG for debug builds only (debugoptimized is not included on this one)
if get_option('buildtype') == 'debug'
add_global_arguments('-DDEBUG', language: 'c')
endif
# Workaround for libtiff having ABI changes but not changing the internal
# version number
if get_option('tiffcfapattern')
add_global_arguments('-DLIBTIFF_CFA_PATTERN', language: 'c')
endif
executable('megapixels',
'src/camera.c',
'src/camera_config.c',
'src/device.c',
'src/flash.c',
'src/gl_util.c',
'src/gles2_debayer.c',
'src/ini.c',
'src/io_pipeline.c',
'src/main.c',
'src/matrix.c',
'src/pipeline.c',
'src/process_pipeline.c',
'src/zbar_pipeline.c',
resources,
include_directories: 'src/',
dependencies: [gtkdep, libm, tiff, zbar, threads, epoxy],
install: true,
link_args: '-Wl,-ldl')
install_data(
[
'config/pine64,pinephone-1.0.ini',
'config/pine64,pinephone-1.1.ini',
'config/pine64,pinephone-1.2.ini',
'config/pine64,pinetab.ini',
],
install_dir: get_option('datadir') / 'megapixels/config/')
# Tools
executable('megapixels-list-devices',
'tools/list_devices.c',
'src/device.c',
include_directories: 'src/',
dependencies: [gtkdep],
install: true)
executable('megapixels-camera-test',
'tools/camera_test.c',
'src/camera.c',
'src/device.c',
include_directories: 'src/',
dependencies: [gtkdep],
install: true)
# Formatting
clang_format = find_program('clang-format-12', required: false)
if clang_format.found()
format_files = [
'data/blit.frag',
'data/blit.vert',
'data/debayer.frag',
'data/debayer.vert',
'data/solid.frag',
'data/solid.vert',
'src/camera.c',
'src/camera.h',
'src/camera_config.c',
'src/camera_config.h',
'src/device.c',
'src/device.h',
'src/flash.c',
'src/flash.h',
'src/gl_util.c',
'src/gl_util.h',
'src/gles2_debayer.c',
'src/gles2_debayer.h',
'src/io_pipeline.c',
'src/io_pipeline.h',
'src/main.c',
'src/main.h',
'src/matrix.c',
'src/matrix.h',
'src/pipeline.c',
'src/pipeline.h',
'src/process_pipeline.c',
'src/process_pipeline.h',
'src/zbar_pipeline.c',
'src/zbar_pipeline.h',
'tools/camera_test.c',
'tools/list_devices.c',
]
run_target('clang-format',
command: ['clang-format.sh', '-i'] + format_files)
run_target('clang-format-check',
command: ['clang-format.sh', '-n', '-Werror'] + format_files)
endif
megapixels-1.4.3/meson_options.txt 0000664 0000000 0000000 00000000071 14155633267 0017324 0 ustar 00root root 0000000 0000000 option('tiffcfapattern', type: 'boolean', value: false)
megapixels-1.4.3/src/ 0000775 0000000 0000000 00000000000 14155633267 0014460 5 ustar 00root root 0000000 0000000 megapixels-1.4.3/src/camera.c 0000664 0000000 0000000 00000134426 14155633267 0016066 0 ustar 00root root 0000000 0000000 #include "camera.h"
#include <assert.h>
#include <errno.h>
#include <fcntl.h>
#include <glib.h>
#include <linux/v4l2-subdev.h>
#include <linux/videodev2.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <strings.h>
#include <sys/ioctl.h>
#include <sys/mman.h>
#include <sys/wait.h>
#include <unistd.h>
#define MAX_VIDEO_BUFFERS 20
#define MAX_BG_TASKS 8
static const char *pixel_format_names[MP_PIXEL_FMT_MAX] = {
"unsupported", "BGGR8", "GBRG8", "GRBG8", "RGGB8", "BGGR10P",
"GBRG10P", "GRBG10P", "RGGB10P", "UYVY", "YUYV",
};
const char *
mp_pixel_format_to_str(uint32_t pixel_format)
{
g_return_val_if_fail(pixel_format < MP_PIXEL_FMT_MAX, "INVALID");
return pixel_format_names[pixel_format];
}
MPPixelFormat
mp_pixel_format_from_str(const char *name)
{
for (MPPixelFormat i = 0; i < MP_PIXEL_FMT_MAX; ++i) {
if (strcasecmp(pixel_format_names[i], name) == 0) {
return i;
}
}
g_return_val_if_reached(MP_PIXEL_FMT_UNSUPPORTED);
}
static const uint32_t pixel_format_v4l_pixel_formats[MP_PIXEL_FMT_MAX] = {
0,
V4L2_PIX_FMT_SBGGR8,
V4L2_PIX_FMT_SGBRG8,
V4L2_PIX_FMT_SGRBG8,
V4L2_PIX_FMT_SRGGB8,
V4L2_PIX_FMT_SBGGR10P,
V4L2_PIX_FMT_SGBRG10P,
V4L2_PIX_FMT_SGRBG10P,
V4L2_PIX_FMT_SRGGB10P,
V4L2_PIX_FMT_UYVY,
V4L2_PIX_FMT_YUYV,
};
uint32_t
mp_pixel_format_to_v4l_pixel_format(MPPixelFormat pixel_format)
{
g_return_val_if_fail(pixel_format < MP_PIXEL_FMT_MAX, 0);
return pixel_format_v4l_pixel_formats[pixel_format];
}
MPPixelFormat
mp_pixel_format_from_v4l_pixel_format(uint32_t v4l_pixel_format)
{
for (MPPixelFormat i = 0; i < MP_PIXEL_FMT_MAX; ++i) {
if (pixel_format_v4l_pixel_formats[i] == v4l_pixel_format) {
return i;
}
}
return MP_PIXEL_FMT_UNSUPPORTED;
}
static const uint32_t pixel_format_v4l_bus_codes[MP_PIXEL_FMT_MAX] = {
0,
MEDIA_BUS_FMT_SBGGR8_1X8,
MEDIA_BUS_FMT_SGBRG8_1X8,
MEDIA_BUS_FMT_SGRBG8_1X8,
MEDIA_BUS_FMT_SRGGB8_1X8,
MEDIA_BUS_FMT_SBGGR10_1X10,
MEDIA_BUS_FMT_SGBRG10_1X10,
MEDIA_BUS_FMT_SGRBG10_1X10,
MEDIA_BUS_FMT_SRGGB10_1X10,
MEDIA_BUS_FMT_UYVY8_2X8,
MEDIA_BUS_FMT_YUYV8_2X8,
};
uint32_t
mp_pixel_format_to_v4l_bus_code(MPPixelFormat pixel_format)
{
g_return_val_if_fail(pixel_format < MP_PIXEL_FMT_MAX, 0);
return pixel_format_v4l_bus_codes[pixel_format];
}
MPPixelFormat
mp_pixel_format_from_v4l_bus_code(uint32_t v4l_bus_code)
{
for (MPPixelFormat i = 0; i < MP_PIXEL_FMT_MAX; ++i) {
if (pixel_format_v4l_bus_codes[i] == v4l_bus_code) {
return i;
}
}
return MP_PIXEL_FMT_UNSUPPORTED;
}
uint32_t
mp_pixel_format_bits_per_pixel(MPPixelFormat pixel_format)
{
g_return_val_if_fail(pixel_format < MP_PIXEL_FMT_MAX, 0);
switch (pixel_format) {
case MP_PIXEL_FMT_BGGR8:
case MP_PIXEL_FMT_GBRG8:
case MP_PIXEL_FMT_GRBG8:
case MP_PIXEL_FMT_RGGB8:
return 8;
case MP_PIXEL_FMT_BGGR10P:
case MP_PIXEL_FMT_GBRG10P:
case MP_PIXEL_FMT_GRBG10P:
case MP_PIXEL_FMT_RGGB10P:
return 10;
case MP_PIXEL_FMT_UYVY:
case MP_PIXEL_FMT_YUYV:
return 16;
default:
return 0;
}
}
uint32_t
mp_pixel_format_pixel_depth(MPPixelFormat pixel_format)
{
g_return_val_if_fail(pixel_format < MP_PIXEL_FMT_MAX, 0);
switch (pixel_format) {
case MP_PIXEL_FMT_BGGR8:
case MP_PIXEL_FMT_GBRG8:
case MP_PIXEL_FMT_GRBG8:
case MP_PIXEL_FMT_RGGB8:
case MP_PIXEL_FMT_UYVY:
case MP_PIXEL_FMT_YUYV:
return 8;
case MP_PIXEL_FMT_GBRG10P:
case MP_PIXEL_FMT_GRBG10P:
case MP_PIXEL_FMT_RGGB10P:
case MP_PIXEL_FMT_BGGR10P:
return 10;
default:
return 0;
}
}
uint32_t
mp_pixel_format_width_to_bytes(MPPixelFormat pixel_format, uint32_t width)
{
uint32_t bits_per_pixel = mp_pixel_format_bits_per_pixel(pixel_format);
uint64_t bits_per_width = width * (uint64_t)bits_per_pixel;
uint64_t remainder = bits_per_width % 8;
if (remainder == 0)
return bits_per_width / 8;
return (bits_per_width + 8 - remainder) / 8;
}
uint32_t
mp_pixel_format_width_to_colors(MPPixelFormat pixel_format, uint32_t width)
{
g_return_val_if_fail(pixel_format < MP_PIXEL_FMT_MAX, 0);
switch (pixel_format) {
case MP_PIXEL_FMT_BGGR8:
case MP_PIXEL_FMT_GBRG8:
case MP_PIXEL_FMT_GRBG8:
case MP_PIXEL_FMT_RGGB8:
return width / 2;
case MP_PIXEL_FMT_BGGR10P:
case MP_PIXEL_FMT_GBRG10P:
case MP_PIXEL_FMT_GRBG10P:
case MP_PIXEL_FMT_RGGB10P:
return width / 2 * 5;
case MP_PIXEL_FMT_UYVY:
case MP_PIXEL_FMT_YUYV:
return width;
default:
return 0;
}
}
uint32_t
mp_pixel_format_height_to_colors(MPPixelFormat pixel_format, uint32_t height)
{
g_return_val_if_fail(pixel_format < MP_PIXEL_FMT_MAX, 0);
switch (pixel_format) {
case MP_PIXEL_FMT_BGGR8:
case MP_PIXEL_FMT_GBRG8:
case MP_PIXEL_FMT_GRBG8:
case MP_PIXEL_FMT_RGGB8:
case MP_PIXEL_FMT_BGGR10P:
case MP_PIXEL_FMT_GBRG10P:
case MP_PIXEL_FMT_GRBG10P:
case MP_PIXEL_FMT_RGGB10P:
return height / 2;
case MP_PIXEL_FMT_UYVY:
case MP_PIXEL_FMT_YUYV:
return height;
default:
return 0;
}
}
bool
mp_camera_mode_is_equivalent(const MPCameraMode *m1, const MPCameraMode *m2)
{
return m1->pixel_format == m2->pixel_format &&
m1->frame_interval.numerator == m2->frame_interval.numerator &&
m1->frame_interval.denominator == m2->frame_interval.denominator &&
m1->width == m2->width && m1->height == m2->height;
}
static void
errno_printerr(const char *s)
{
g_printerr("MPCamera: %s error %d, %s\n", s, errno, strerror(errno));
}
static int
xioctl(int fd, int request, void *arg)
{
int r;
do {
r = ioctl(fd, request, arg);
} while (r == -1 && errno == EINTR);
return r;
}
struct video_buffer {
uint32_t length;
uint8_t *data;
int fd;
};
struct _MPCamera {
int video_fd;
int subdev_fd;
bool has_set_mode;
MPCameraMode current_mode;
struct video_buffer buffers[MAX_VIDEO_BUFFERS];
uint32_t num_buffers;
// keeping track of background task child-PIDs for cleanup code
int child_bg_pids[MAX_BG_TASKS];
bool use_mplane;
};
MPCamera *
mp_camera_new(int video_fd, int subdev_fd)
{
g_return_val_if_fail(video_fd != -1, NULL);
// Query capabilities
struct v4l2_capability cap;
if (xioctl(video_fd, VIDIOC_QUERYCAP, &cap) == -1) {
return NULL;
}
// Check whether this is a video capture device
bool use_mplane;
if (cap.capabilities & V4L2_CAP_VIDEO_CAPTURE_MPLANE) {
use_mplane = true;
printf("!!\n");
} else if (cap.capabilities & V4L2_CAP_VIDEO_CAPTURE) {
use_mplane = false;
} else {
return NULL;
}
MPCamera *camera = malloc(sizeof(MPCamera));
camera->video_fd = video_fd;
camera->subdev_fd = subdev_fd;
camera->has_set_mode = false;
camera->num_buffers = 0;
camera->use_mplane = use_mplane;
memset(camera->child_bg_pids,
0,
sizeof(camera->child_bg_pids[0]) * MAX_BG_TASKS);
return camera;
}
void
mp_camera_free(MPCamera *camera)
{
mp_camera_wait_bg_tasks(camera);
g_warn_if_fail(camera->num_buffers == 0);
if (camera->num_buffers != 0) {
mp_camera_stop_capture(camera);
}
free(camera);
}
void
mp_camera_add_bg_task(MPCamera *camera, pid_t pid)
{
int status;
while (true) {
for (size_t i = 0; i < MAX_BG_TASKS; ++i) {
if (camera->child_bg_pids[i] == 0) {
camera->child_bg_pids[i] = pid;
return;
} else {
// error == -1, still running == 0
if (waitpid(camera->child_bg_pids[i],
&status,
WNOHANG) <= 0)
continue; // consider errored wait still
// running
if (WIFEXITED(status)) {
// replace exited
camera->child_bg_pids[i] = pid;
return;
}
}
}
// wait for any status change on child processes
pid_t changed = waitpid(-1, &status, 0);
if (WIFEXITED(status)) {
// some child exited
for (size_t i = 0; i < MAX_BG_TASKS; ++i) {
if (camera->child_bg_pids[i] == changed) {
camera->child_bg_pids[i] = pid;
return;
}
}
}
// no luck, repeat and check if something exited maybe
}
}
void
mp_camera_wait_bg_tasks(MPCamera *camera)
{
for (size_t i = 0; i < MAX_BG_TASKS; ++i) {
if (camera->child_bg_pids[i] != 0) {
// ignore errors
waitpid(camera->child_bg_pids[i], NULL, 0);
}
}
}
bool
mp_camera_check_task_complete(MPCamera *camera, pid_t pid)
{
// this method is potentially unsafe because pid could already be reused at
// this point, but extremely unlikely so we won't implement this.
int status;
if (pid == 0)
return true;
// ignore errors (-1), no exit == 0
int pidchange = waitpid(pid, &status, WNOHANG);
if (pidchange == -1) // error or exists and runs
return false;
if (WIFEXITED(status)) {
for (size_t i = 0; i < MAX_BG_TASKS; ++i) {
if (camera->child_bg_pids[i] == pid) {
camera->child_bg_pids[i] = 0;
break;
}
}
return true;
} else {
return false;
}
}
bool
mp_camera_is_subdev(MPCamera *camera)
{
return camera->subdev_fd != -1;
}
int
mp_camera_get_video_fd(MPCamera *camera)
{
return camera->video_fd;
}
int
mp_camera_get_subdev_fd(MPCamera *camera)
{
return camera->subdev_fd;
}
static bool
camera_mode_impl(MPCamera *camera, int request, MPCameraMode *mode)
{
uint32_t pixfmt = mp_pixel_format_from_v4l_pixel_format(mode->pixel_format);
struct v4l2_format fmt = {};
if (camera->use_mplane) {
fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
fmt.fmt.pix_mp.width = mode->width;
fmt.fmt.pix_mp.height = mode->height;
fmt.fmt.pix_mp.pixelformat = pixfmt;
fmt.fmt.pix_mp.field = V4L2_FIELD_ANY;
} else {
fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
fmt.fmt.pix.width = mode->width;
fmt.fmt.pix.height = mode->height;
fmt.fmt.pix.pixelformat = pixfmt;
fmt.fmt.pix.field = V4L2_FIELD_ANY;
}
if (xioctl(camera->video_fd, request, &fmt) == -1) {
return false;
}
if (camera->use_mplane) {
mode->width = fmt.fmt.pix_mp.width;
mode->height = fmt.fmt.pix_mp.height;
mode->pixel_format = mp_pixel_format_from_v4l_pixel_format(
fmt.fmt.pix_mp.pixelformat);
} else {
mode->width = fmt.fmt.pix.width;
mode->height = fmt.fmt.pix.height;
mode->pixel_format = mp_pixel_format_from_v4l_pixel_format(
fmt.fmt.pix.pixelformat);
}
return true;
}
bool
mp_camera_try_mode(MPCamera *camera, MPCameraMode *mode)
{
if (!camera_mode_impl(camera, VIDIOC_TRY_FMT, mode)) {
errno_printerr("VIDIOC_TRY_FMT");
return false;
}
return true;
}
const MPCameraMode *
mp_camera_get_mode(const MPCamera *camera)
{
return &camera->current_mode;
}
bool
mp_camera_set_mode(MPCamera *camera, MPCameraMode *mode)
{
// Set the mode in the subdev the camera is one
if (mp_camera_is_subdev(camera)) {
struct v4l2_subdev_frame_interval interval = {};
interval.pad = 0;
interval.interval = mode->frame_interval;
if (xioctl(camera->subdev_fd,
VIDIOC_SUBDEV_S_FRAME_INTERVAL,
&interval) == -1) {
errno_printerr("VIDIOC_SUBDEV_S_FRAME_INTERVAL");
return false;
}
bool did_set_frame_rate = interval.interval.numerator ==
mode->frame_interval.numerator &&
interval.interval.denominator ==
mode->frame_interval.denominator;
struct v4l2_subdev_format fmt = {};
fmt.pad = 0;
fmt.which = V4L2_SUBDEV_FORMAT_ACTIVE;
fmt.format.width = mode->width;
fmt.format.height = mode->height;
fmt.format.code =
mp_pixel_format_to_v4l_bus_code(mode->pixel_format);
fmt.format.field = V4L2_FIELD_ANY;
if (xioctl(camera->subdev_fd, VIDIOC_SUBDEV_S_FMT, &fmt) == -1) {
errno_printerr("VIDIOC_SUBDEV_S_FMT");
return false;
}
// Some drivers like ov5640 don't allow you to set the frame format
// with too high a frame-rate, but that means the frame-rate won't be
// set after the format change. So we need to try again here if we
// didn't succeed before. Ideally we'd be able to set both at once.
if (!did_set_frame_rate) {
interval.interval = mode->frame_interval;
if (xioctl(camera->subdev_fd,
VIDIOC_SUBDEV_S_FRAME_INTERVAL,
&interval) == -1) {
errno_printerr("VIDIOC_SUBDEV_S_FRAME_INTERVAL");
}
}
// Update the mode
mode->pixel_format =
mp_pixel_format_from_v4l_bus_code(fmt.format.code);
mode->frame_interval = interval.interval;
mode->width = fmt.format.width;
mode->height = fmt.format.height;
}
// Set the mode for the video device
{
if (!camera_mode_impl(camera, VIDIOC_S_FMT, mode)) {
errno_printerr("VIDIOC_S_FMT");
return false;
}
}
camera->has_set_mode = true;
camera->current_mode = *mode;
return true;
}
bool
mp_camera_start_capture(MPCamera *camera)
{
g_return_val_if_fail(camera->has_set_mode, false);
g_return_val_if_fail(camera->num_buffers == 0, false);
enum v4l2_buf_type buftype = V4L2_BUF_TYPE_VIDEO_CAPTURE;
if (camera->use_mplane) {
buftype = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
}
// Start by requesting buffers
struct v4l2_requestbuffers req = {};
req.count = MAX_VIDEO_BUFFERS;
req.type = buftype;
req.memory = V4L2_MEMORY_MMAP;
if (xioctl(camera->video_fd, VIDIOC_REQBUFS, &req) == -1) {
errno_printerr("VIDIOC_REQBUFS");
return false;
}
if (req.count < 2) {
g_printerr(
"Insufficient buffer memory. Only %d buffers available.\n",
req.count);
goto error;
}
for (uint32_t i = 0; i < req.count; ++i) {
// Query each buffer and mmap it
struct v4l2_buffer buf = {
.type = buftype,
.memory = V4L2_MEMORY_MMAP,
.index = i,
};
struct v4l2_plane planes[1];
if (camera->use_mplane) {
buf.m.planes = planes;
buf.length = 1;
}
if (xioctl(camera->video_fd, VIDIOC_QUERYBUF, &buf) == -1) {
errno_printerr("VIDIOC_QUERYBUF");
break;
}
if (camera->use_mplane) {
camera->buffers[i].length = planes[0].length;
camera->buffers[i].data = mmap(NULL,
planes[0].length,
PROT_READ,
MAP_SHARED,
camera->video_fd,
planes[0].m.mem_offset);
} else {
camera->buffers[i].length = buf.length;
camera->buffers[i].data = mmap(NULL,
buf.length,
PROT_READ,
MAP_SHARED,
camera->video_fd,
buf.m.offset);
}
if (camera->buffers[i].data == MAP_FAILED) {
errno_printerr("mmap");
break;
}
struct v4l2_exportbuffer expbuf = {
.type = V4L2_BUF_TYPE_VIDEO_CAPTURE,
.index = i,
};
if (xioctl(camera->video_fd, VIDIOC_EXPBUF, &expbuf) == -1) {
errno_printerr("VIDIOC_EXPBUF");
break;
}
camera->buffers[i].fd = expbuf.fd;
++camera->num_buffers;
}
if (camera->num_buffers != req.count) {
g_printerr("Unable to map all buffers\n");
goto error;
}
for (uint32_t i = 0; i < camera->num_buffers; ++i) {
struct v4l2_buffer buf = {
.type = buftype,
.memory = V4L2_MEMORY_MMAP,
.index = i,
};
struct v4l2_plane planes[1];
if (camera->use_mplane) {
buf.m.planes = planes;
buf.length = 1;
}
// Queue the buffer for capture
if (xioctl(camera->video_fd, VIDIOC_QBUF, &buf) == -1) {
errno_printerr("VIDIOC_QBUF");
goto error;
}
}
// Start capture
enum v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
if (xioctl(camera->video_fd, VIDIOC_STREAMON, &type) == -1) {
errno_printerr("VIDIOC_STREAMON");
goto error;
}
return true;
error:
// Unmap any mapped buffers
assert(camera->num_buffers <= MAX_VIDEO_BUFFERS);
for (uint32_t i = 0; i < camera->num_buffers; ++i) {
if (munmap(camera->buffers[i].data, camera->buffers[i].length) ==
-1) {
errno_printerr("munmap");
}
if (close(camera->buffers[i].fd) == -1) {
errno_printerr("close");
}
}
// Reset allocated buffers
{
struct v4l2_requestbuffers req = {};
req.count = 0;
req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
req.memory = V4L2_MEMORY_MMAP;
if (xioctl(camera->video_fd, VIDIOC_REQBUFS, &req) == -1) {
errno_printerr("VIDIOC_REQBUFS");
}
}
return false;
}
bool
mp_camera_stop_capture(MPCamera *camera)
{
g_return_val_if_fail(camera->num_buffers > 0, false);
enum v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
if (xioctl(camera->video_fd, VIDIOC_STREAMOFF, &type) == -1) {
errno_printerr("VIDIOC_STREAMOFF");
}
assert(camera->num_buffers <= MAX_VIDEO_BUFFERS);
for (int i = 0; i < camera->num_buffers; ++i) {
if (munmap(camera->buffers[i].data, camera->buffers[i].length) ==
-1) {
errno_printerr("munmap");
}
if (close(camera->buffers[i].fd) == -1) {
errno_printerr("close");
}
}
camera->num_buffers = 0;
struct v4l2_requestbuffers req = {};
req.count = 0;
req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
req.memory = V4L2_MEMORY_MMAP;
if (xioctl(camera->video_fd, VIDIOC_REQBUFS, &req) == -1) {
errno_printerr("VIDIOC_REQBUFS");
}
return true;
}
bool
mp_camera_is_capturing(MPCamera *camera)
{
return camera->num_buffers > 0;
}
bool
mp_camera_capture_buffer(MPCamera *camera, MPBuffer *buffer)
{
struct v4l2_buffer buf = {};
buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
buf.memory = V4L2_MEMORY_MMAP;
struct v4l2_plane planes[1];
if (camera->use_mplane) {
buf.m.planes = planes;
buf.length = 1;
}
if (xioctl(camera->video_fd, VIDIOC_DQBUF, &buf) == -1) {
switch (errno) {
case EAGAIN:
return true;
case EIO:
/* Could ignore EIO, see spec. */
/* fallthrough */
default:
errno_printerr("VIDIOC_DQBUF");
exit(1);
return false;
}
}
uint32_t pixel_format = camera->current_mode.pixel_format;
uint32_t width = camera->current_mode.width;
uint32_t height = camera->current_mode.height;
uint32_t bytesused;
if (camera->use_mplane) {
bytesused = planes[0].bytesused;
} else {
bytesused = buf.bytesused;
}
assert(bytesused ==
mp_pixel_format_width_to_bytes(pixel_format, width) * height);
assert(bytesused == camera->buffers[buf.index].length);
buffer->index = buf.index;
buffer->data = camera->buffers[buf.index].data;
buffer->fd = camera->buffers[buf.index].fd;
return true;
}
bool
mp_camera_release_buffer(MPCamera *camera, uint32_t buffer_index)
{
struct v4l2_buffer buf = {};
buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
buf.memory = V4L2_MEMORY_MMAP;
buf.index = buffer_index;
if (xioctl(camera->video_fd, VIDIOC_QBUF, &buf) == -1) {
errno_printerr("VIDIOC_QBUF");
return false;
}
return true;
}
struct _MPCameraModeList {
MPCameraMode mode;
MPCameraModeList *next;
};
static MPCameraModeList *
get_subdev_modes(MPCamera *camera, bool (*check)(MPCamera *, MPCameraMode *))
{
MPCameraModeList *item = NULL;
for (uint32_t fmt_index = 0;; ++fmt_index) {
struct v4l2_subdev_mbus_code_enum fmt = {};
fmt.index = fmt_index;
fmt.pad = 0;
fmt.which = V4L2_SUBDEV_FORMAT_TRY;
if (xioctl(camera->subdev_fd, VIDIOC_SUBDEV_ENUM_MBUS_CODE, &fmt) ==
-1) {
if (errno != EINVAL) {
errno_printerr("VIDIOC_SUBDEV_ENUM_MBUS_CODE");
}
break;
}
// Skip unsupported formats
uint32_t format = mp_pixel_format_from_v4l_bus_code(fmt.code);
if (format == MP_PIXEL_FMT_UNSUPPORTED) {
continue;
}
for (uint32_t frame_index = 0;; ++frame_index) {
struct v4l2_subdev_frame_size_enum frame = {};
frame.index = frame_index;
frame.pad = 0;
frame.code = fmt.code;
frame.which = V4L2_SUBDEV_FORMAT_TRY;
if (xioctl(camera->subdev_fd,
VIDIOC_SUBDEV_ENUM_FRAME_SIZE,
&frame) == -1) {
if (errno != EINVAL) {
errno_printerr(
"VIDIOC_SUBDEV_ENUM_FRAME_SIZE");
}
break;
}
// TODO: Handle other types
if (frame.min_width != frame.max_width ||
frame.min_height != frame.max_height) {
break;
}
for (uint32_t interval_index = 0;; ++interval_index) {
struct v4l2_subdev_frame_interval_enum interval = {};
interval.index = interval_index;
interval.pad = 0;
interval.code = fmt.code;
interval.width = frame.max_width;
interval.height = frame.max_height;
interval.which = V4L2_SUBDEV_FORMAT_TRY;
if (xioctl(camera->subdev_fd,
VIDIOC_SUBDEV_ENUM_FRAME_INTERVAL,
&interval) == -1) {
if (errno != EINVAL) {
errno_printerr(
"VIDIOC_SUBDEV_ENUM_FRAME_INTERVAL");
}
break;
}
MPCameraMode mode = {
.pixel_format = format,
.frame_interval = interval.interval,
.width = frame.max_width,
.height = frame.max_height,
};
if (!check(camera, &mode)) {
continue;
}
MPCameraModeList *new_item =
malloc(sizeof(MPCameraModeList));
new_item->mode = mode;
new_item->next = item;
item = new_item;
}
}
}
return item;
}
static MPCameraModeList *
get_video_modes(MPCamera *camera, bool (*check)(MPCamera *, MPCameraMode *))
{
MPCameraModeList *item = NULL;
for (uint32_t fmt_index = 0;; ++fmt_index) {
struct v4l2_fmtdesc fmt = {};
fmt.index = fmt_index;
fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
if (xioctl(camera->video_fd, VIDIOC_ENUM_FMT, &fmt) == -1) {
if (errno != EINVAL) {
errno_printerr("VIDIOC_ENUM_FMT");
}
break;
}
// Skip unsupported formats
uint32_t format =
mp_pixel_format_from_v4l_pixel_format(fmt.pixelformat);
if (format == MP_PIXEL_FMT_UNSUPPORTED) {
continue;
}
for (uint32_t frame_index = 0;; ++frame_index) {
struct v4l2_frmsizeenum frame = {};
frame.index = frame_index;
frame.pixel_format = fmt.pixelformat;
if (xioctl(camera->video_fd,
VIDIOC_ENUM_FRAMESIZES,
&frame) == -1) {
if (errno != EINVAL) {
errno_printerr("VIDIOC_ENUM_FRAMESIZES");
}
break;
}
// TODO: Handle other types
if (frame.type != V4L2_FRMSIZE_TYPE_DISCRETE) {
break;
}
for (uint32_t interval_index = 0;; ++interval_index) {
struct v4l2_frmivalenum interval = {};
interval.index = interval_index;
interval.pixel_format = fmt.pixelformat;
interval.width = frame.discrete.width;
interval.height = frame.discrete.height;
if (xioctl(camera->video_fd,
VIDIOC_ENUM_FRAMEINTERVALS,
&interval) == -1) {
if (errno != EINVAL) {
errno_printerr(
"VIDIOC_ENUM_FRAMEINTERVALS");
}
break;
}
// TODO: Handle other types
if (interval.type != V4L2_FRMIVAL_TYPE_DISCRETE) {
break;
}
MPCameraMode mode = {
.pixel_format = format,
.frame_interval = interval.discrete,
.width = frame.discrete.width,
.height = frame.discrete.height,
};
if (!check(camera, &mode)) {
continue;
}
MPCameraModeList *new_item =
malloc(sizeof(MPCameraModeList));
new_item->mode = mode;
new_item->next = item;
item = new_item;
}
}
}
return item;
}
static bool
all_modes(MPCamera *camera, MPCameraMode *mode)
{
return true;
}
static bool
available_modes(MPCamera *camera, MPCameraMode *mode)
{
MPCameraMode attempt = *mode;
return mp_camera_try_mode(camera, &attempt) &&
mp_camera_mode_is_equivalent(mode, &attempt);
}
MPCameraModeList *
mp_camera_list_supported_modes(MPCamera *camera)
{
if (mp_camera_is_subdev(camera)) {
return get_subdev_modes(camera, all_modes);
} else {
return get_video_modes(camera, all_modes);
}
}
MPCameraModeList *
mp_camera_list_available_modes(MPCamera *camera)
{
if (mp_camera_is_subdev(camera)) {
return get_subdev_modes(camera, available_modes);
} else {
return get_video_modes(camera, available_modes);
}
}
MPCameraMode *
mp_camera_mode_list_get(MPCameraModeList *list)
{
g_return_val_if_fail(list, NULL);
return &list->mode;
}
MPCameraModeList *
mp_camera_mode_list_next(MPCameraModeList *list)
{
g_return_val_if_fail(list, NULL);
return list->next;
}
void
mp_camera_mode_list_free(MPCameraModeList *list)
{
while (list) {
MPCameraModeList *tmp = list;
list = tmp->next;
free(tmp);
}
}
struct int_str_pair {
uint32_t value;
const char *str;
};
struct int_str_pair control_id_names[] = {
{ V4L2_CID_BRIGHTNESS, "BRIGHTNESS" },
{ V4L2_CID_CONTRAST, "CONTRAST" },
{ V4L2_CID_SATURATION, "SATURATION" },
{ V4L2_CID_HUE, "HUE" },
{ V4L2_CID_AUDIO_VOLUME, "AUDIO_VOLUME" },
{ V4L2_CID_AUDIO_BALANCE, "AUDIO_BALANCE" },
{ V4L2_CID_AUDIO_BASS, "AUDIO_BASS" },
{ V4L2_CID_AUDIO_TREBLE, "AUDIO_TREBLE" },
{ V4L2_CID_AUDIO_MUTE, "AUDIO_MUTE" },
{ V4L2_CID_AUDIO_LOUDNESS, "AUDIO_LOUDNESS" },
{ V4L2_CID_BLACK_LEVEL, "BLACK_LEVEL" },
{ V4L2_CID_AUTO_WHITE_BALANCE, "AUTO_WHITE_BALANCE" },
{ V4L2_CID_DO_WHITE_BALANCE, "DO_WHITE_BALANCE" },
{ V4L2_CID_RED_BALANCE, "RED_BALANCE" },
{ V4L2_CID_BLUE_BALANCE, "BLUE_BALANCE" },
{ V4L2_CID_GAMMA, "GAMMA" },
{ V4L2_CID_WHITENESS, "WHITENESS" },
{ V4L2_CID_EXPOSURE, "EXPOSURE" },
{ V4L2_CID_AUTOGAIN, "AUTOGAIN" },
{ V4L2_CID_GAIN, "GAIN" },
{ V4L2_CID_HFLIP, "HFLIP" },
{ V4L2_CID_VFLIP, "VFLIP" },
{ V4L2_CID_POWER_LINE_FREQUENCY, "POWER_LINE_FREQUENCY" },
{ V4L2_CID_HUE_AUTO, "HUE_AUTO" },
{ V4L2_CID_WHITE_BALANCE_TEMPERATURE, "WHITE_BALANCE_TEMPERATURE" },
{ V4L2_CID_SHARPNESS, "SHARPNESS" },
{ V4L2_CID_BACKLIGHT_COMPENSATION, "BACKLIGHT_COMPENSATION" },
{ V4L2_CID_CHROMA_AGC, "CHROMA_AGC" },
{ V4L2_CID_COLOR_KILLER, "COLOR_KILLER" },
{ V4L2_CID_COLORFX, "COLORFX" },
{ V4L2_CID_AUTOBRIGHTNESS, "AUTOBRIGHTNESS" },
{ V4L2_CID_BAND_STOP_FILTER, "BAND_STOP_FILTER" },
{ V4L2_CID_ROTATE, "ROTATE" },
{ V4L2_CID_BG_COLOR, "BG_COLOR" },
{ V4L2_CID_CHROMA_GAIN, "CHROMA_GAIN" },
{ V4L2_CID_ILLUMINATORS_1, "ILLUMINATORS_1" },
{ V4L2_CID_ILLUMINATORS_2, "ILLUMINATORS_2" },
{ V4L2_CID_MIN_BUFFERS_FOR_CAPTURE, "MIN_BUFFERS_FOR_CAPTURE" },
{ V4L2_CID_MIN_BUFFERS_FOR_OUTPUT, "MIN_BUFFERS_FOR_OUTPUT" },
{ V4L2_CID_ALPHA_COMPONENT, "ALPHA_COMPONENT" },
{ V4L2_CID_COLORFX_CBCR, "COLORFX_CBCR" },
{ V4L2_CID_LASTP1, "LASTP1" },
{ V4L2_CID_USER_MEYE_BASE, "USER_MEYE_BASE" },
{ V4L2_CID_USER_BTTV_BASE, "USER_BTTV_BASE" },
{ V4L2_CID_USER_S2255_BASE, "USER_S2255_BASE" },
{ V4L2_CID_USER_SI476X_BASE, "USER_SI476X_BASE" },
{ V4L2_CID_USER_TI_VPE_BASE, "USER_TI_VPE_BASE" },
{ V4L2_CID_USER_SAA7134_BASE, "USER_SAA7134_BASE" },
{ V4L2_CID_USER_ADV7180_BASE, "USER_ADV7180_BASE" },
{ V4L2_CID_USER_TC358743_BASE, "USER_TC358743_BASE" },
{ V4L2_CID_USER_MAX217X_BASE, "USER_MAX217X_BASE" },
{ V4L2_CID_USER_IMX_BASE, "USER_IMX_BASE" },
// { V4L2_CID_USER_ATMEL_ISC_BASE, "USER_ATMEL_ISC_BASE" },
{ V4L2_CID_CAMERA_CLASS_BASE, "CAMERA_CLASS_BASE" },
{ V4L2_CID_CAMERA_CLASS, "CAMERA_CLASS" },
{ V4L2_CID_EXPOSURE_AUTO, "EXPOSURE_AUTO" },
{ V4L2_CID_EXPOSURE_ABSOLUTE, "EXPOSURE_ABSOLUTE" },
{ V4L2_CID_EXPOSURE_AUTO_PRIORITY, "EXPOSURE_AUTO_PRIORITY" },
{ V4L2_CID_PAN_RELATIVE, "PAN_RELATIVE" },
{ V4L2_CID_TILT_RELATIVE, "TILT_RELATIVE" },
{ V4L2_CID_PAN_RESET, "PAN_RESET" },
{ V4L2_CID_TILT_RESET, "TILT_RESET" },
{ V4L2_CID_PAN_ABSOLUTE, "PAN_ABSOLUTE" },
{ V4L2_CID_TILT_ABSOLUTE, "TILT_ABSOLUTE" },
{ V4L2_CID_FOCUS_ABSOLUTE, "FOCUS_ABSOLUTE" },
{ V4L2_CID_FOCUS_RELATIVE, "FOCUS_RELATIVE" },
{ V4L2_CID_FOCUS_AUTO, "FOCUS_AUTO" },
{ V4L2_CID_ZOOM_ABSOLUTE, "ZOOM_ABSOLUTE" },
{ V4L2_CID_ZOOM_RELATIVE, "ZOOM_RELATIVE" },
{ V4L2_CID_ZOOM_CONTINUOUS, "ZOOM_CONTINUOUS" },
{ V4L2_CID_PRIVACY, "PRIVACY" },
{ V4L2_CID_IRIS_ABSOLUTE, "IRIS_ABSOLUTE" },
{ V4L2_CID_IRIS_RELATIVE, "IRIS_RELATIVE" },
{ V4L2_CID_AUTO_EXPOSURE_BIAS, "AUTO_EXPOSURE_BIAS" },
{ V4L2_CID_AUTO_N_PRESET_WHITE_BALANCE, "AUTO_N_PRESET_WHITE_BALANCE" },
{ V4L2_CID_WIDE_DYNAMIC_RANGE, "WIDE_DYNAMIC_RANGE" },
{ V4L2_CID_IMAGE_STABILIZATION, "IMAGE_STABILIZATION" },
{ V4L2_CID_ISO_SENSITIVITY, "ISO_SENSITIVITY" },
{ V4L2_CID_ISO_SENSITIVITY_AUTO, "ISO_SENSITIVITY_AUTO" },
{ V4L2_CID_EXPOSURE_METERING, "EXPOSURE_METERING" },
{ V4L2_CID_SCENE_MODE, "SCENE_MODE" },
{ V4L2_CID_3A_LOCK, "3A_LOCK" },
{ V4L2_CID_AUTO_FOCUS_START, "AUTO_FOCUS_START" },
{ V4L2_CID_AUTO_FOCUS_STOP, "AUTO_FOCUS_STOP" },
{ V4L2_CID_AUTO_FOCUS_STATUS, "AUTO_FOCUS_STATUS" },
{ V4L2_CID_AUTO_FOCUS_RANGE, "AUTO_FOCUS_RANGE" },
{ V4L2_CID_PAN_SPEED, "PAN_SPEED" },
{ V4L2_CID_TILT_SPEED, "TILT_SPEED" },
// { V4L2_CID_CAMERA_ORIENTATION, "CAMERA_ORIENTATION" },
// { V4L2_CID_CAMERA_SENSOR_ROTATION, "CAMERA_SENSOR_ROTATION" },
{ V4L2_CID_FLASH_LED_MODE, "FLASH_LED_MODE" },
{ V4L2_CID_FLASH_STROBE_SOURCE, "FLASH_STROBE_SOURCE" },
{ V4L2_CID_FLASH_STROBE, "FLASH_STROBE" },
{ V4L2_CID_FLASH_STROBE_STOP, "FLASH_STROBE_STOP" },
{ V4L2_CID_FLASH_STROBE_STATUS, "FLASH_STROBE_STATUS" },
{ V4L2_CID_FLASH_TIMEOUT, "FLASH_TIMEOUT" },
{ V4L2_CID_FLASH_INTENSITY, "FLASH_INTENSITY" },
{ V4L2_CID_FLASH_TORCH_INTENSITY, "FLASH_TORCH_INTENSITY" },
{ V4L2_CID_FLASH_INDICATOR_INTENSITY, "FLASH_INDICATOR_INTENSITY" },
{ V4L2_CID_FLASH_FAULT, "FLASH_FAULT" },
{ V4L2_CID_FLASH_CHARGE, "FLASH_CHARGE" },
{ V4L2_CID_FLASH_READY, "FLASH_READY" },
};
const char *
mp_control_id_to_str(uint32_t id)
{
size_t size = sizeof(control_id_names) / sizeof(*control_id_names);
for (size_t i = 0; i < size; ++i) {
if (control_id_names[i].value == id) {
return control_id_names[i].str;
}
}
return "UNKNOWN";
}
struct int_str_pair control_type_names[] = {
{ V4L2_CTRL_TYPE_INTEGER, "INTEGER" },
{ V4L2_CTRL_TYPE_BOOLEAN, "BOOLEAN" },
{ V4L2_CTRL_TYPE_MENU, "MENU" },
{ V4L2_CTRL_TYPE_INTEGER_MENU, "INTEGER_MENU" },
{ V4L2_CTRL_TYPE_BITMASK, "BITMASK" },
{ V4L2_CTRL_TYPE_BUTTON, "BUTTON" },
{ V4L2_CTRL_TYPE_INTEGER64, "INTEGER64" },
{ V4L2_CTRL_TYPE_STRING, "STRING" },
{ V4L2_CTRL_TYPE_CTRL_CLASS, "CTRL_CLASS" },
{ V4L2_CTRL_TYPE_U8, "U8" },
{ V4L2_CTRL_TYPE_U16, "U16" },
{ V4L2_CTRL_TYPE_U32, "U32" },
// { V4L2_CTRL_TYPE_MPEG2_SLICE_PARAMS, "MPEG2_SLICE_PARAMS" },
// { V4L2_CTRL_TYPE_MPEG2_QUANTIZATION, "MPEG2_QUANTIZATION" },
// { V4L2_CTRL_TYPE_AREA, "AREA" },
// { V4L2_CTRL_TYPE_H264_SPS, "H264_SPS" },
// { V4L2_CTRL_TYPE_H264_PPS, "H264_PPS" },
// { V4L2_CTRL_TYPE_H264_SCALING_MATRIX, "H264_SCALING_MATRIX" },
// { V4L2_CTRL_TYPE_H264_SLICE_PARAMS, "H264_SLICE_PARAMS" },
// { V4L2_CTRL_TYPE_H264_DECODE_PARAMS, "H264_DECODE_PARAMS" },
// { V4L2_CTRL_TYPE_HEVC_SPS, "HEVC_SPS" },
// { V4L2_CTRL_TYPE_HEVC_PPS, "HEVC_PPS" },
// { V4L2_CTRL_TYPE_HEVC_SLICE_PARAMS, "HEVC_SLICE_PARAMS" },
};
const char *
mp_control_type_to_str(uint32_t type)
{
size_t size = sizeof(control_type_names) / sizeof(*control_type_names);
for (size_t i = 0; i < size; ++i) {
if (control_type_names[i].value == type) {
return control_type_names[i].str;
}
}
return "UNKNOWN";
}
struct _MPControlList {
MPControl control;
MPControlList *next;
};
static int
control_fd(MPCamera *camera)
{
if (camera->subdev_fd != -1) {
return camera->subdev_fd;
}
return camera->video_fd;
}
MPControlList *
mp_camera_list_controls(MPCamera *camera)
{
MPControlList *item = NULL;
struct v4l2_query_ext_ctrl ctrl = {};
ctrl.id = V4L2_CTRL_FLAG_NEXT_CTRL | V4L2_CTRL_FLAG_NEXT_COMPOUND;
while (true) {
if (xioctl(control_fd(camera), VIDIOC_QUERY_EXT_CTRL, &ctrl) == -1) {
if (errno != EINVAL) {
errno_printerr("VIDIOC_QUERY_EXT_CTRL");
}
break;
}
MPControl control = {
.id = ctrl.id,
.type = ctrl.type,
.name = {},
.min = ctrl.minimum,
.max = ctrl.maximum,
.step = ctrl.step,
.default_value = ctrl.default_value,
.flags = ctrl.flags,
.element_size = ctrl.elem_size,
.element_count = ctrl.elems,
.dimensions_count = ctrl.nr_of_dims,
.dimensions = {},
};
strcpy(control.name, ctrl.name);
memcpy(control.dimensions,
ctrl.dims,
sizeof(uint32_t) * V4L2_CTRL_MAX_DIMS);
MPControlList *new_item = malloc(sizeof(MPControlList));
new_item->control = control;
new_item->next = item;
item = new_item;
ctrl.id |= V4L2_CTRL_FLAG_NEXT_CTRL | V4L2_CTRL_FLAG_NEXT_COMPOUND;
}
return item;
}
MPControl *
mp_control_list_get(MPControlList *list)
{
g_return_val_if_fail(list, NULL);
return &list->control;
}
MPControlList *
mp_control_list_next(MPControlList *list)
{
g_return_val_if_fail(list, NULL);
return list->next;
}
void
mp_control_list_free(MPControlList *list)
{
while (list) {
MPControlList *tmp = list;
list = tmp->next;
free(tmp);
}
}
bool
mp_camera_query_control(MPCamera *camera, uint32_t id, MPControl *control)
{
struct v4l2_query_ext_ctrl ctrl = {};
ctrl.id = id;
if (xioctl(control_fd(camera), VIDIOC_QUERY_EXT_CTRL, &ctrl) == -1) {
if (errno != EINVAL) {
errno_printerr("VIDIOC_QUERY_EXT_CTRL");
}
return false;
}
if (control) {
control->id = ctrl.id;
control->type = ctrl.type;
strcpy(control->name, ctrl.name);
control->min = ctrl.minimum;
control->max = ctrl.maximum;
control->step = ctrl.step;
control->default_value = ctrl.default_value;
control->flags = ctrl.flags;
control->element_size = ctrl.elem_size;
control->element_count = ctrl.elems;
control->dimensions_count = ctrl.nr_of_dims;
memcpy(control->dimensions,
ctrl.dims,
sizeof(uint32_t) * V4L2_CTRL_MAX_DIMS);
}
return true;
}
static bool
control_impl_int32(MPCamera *camera, uint32_t id, int request, int32_t *value)
{
struct v4l2_ext_control ctrl = {};
ctrl.id = id;
ctrl.value = *value;
struct v4l2_ext_controls ctrls = {
.ctrl_class = 0,
.which = V4L2_CTRL_WHICH_CUR_VAL,
.count = 1,
.controls = &ctrl,
};
if (xioctl(control_fd(camera), request, &ctrls) == -1) {
return false;
}
*value = ctrl.value;
return true;
}
pid_t
mp_camera_control_set_int32_bg(MPCamera *camera, uint32_t id, int32_t v)
{
struct v4l2_ext_control ctrl = {};
ctrl.id = id;
ctrl.value = v;
struct v4l2_ext_controls ctrls = {
.ctrl_class = 0,
.which = V4L2_CTRL_WHICH_CUR_VAL,
.count = 1,
.controls = &ctrl,
};
int fd = control_fd(camera);
// fork only after all the memory has been read
pid_t pid = fork();
if (pid == -1) {
return 0; // discard errors, nothing to do in parent process
} else if (pid != 0) {
// parent process: add the child pid to the wait list so the zombie can be reaped later
mp_camera_add_bg_task(camera, pid);
return pid;
}
// ignore errors
xioctl(fd, VIDIOC_S_EXT_CTRLS, &ctrls);
// exit without calling exit handlers
_exit(0);
}
bool
mp_camera_control_try_int32(MPCamera *camera, uint32_t id, int32_t *v)
{
return control_impl_int32(camera, id, VIDIOC_TRY_EXT_CTRLS, v);
}
bool
mp_camera_control_set_int32(MPCamera *camera, uint32_t id, int32_t v)
{
return control_impl_int32(camera, id, VIDIOC_S_EXT_CTRLS, &v);
}
int32_t
mp_camera_control_get_int32(MPCamera *camera, uint32_t id)
{
int32_t v = 0;
control_impl_int32(camera, id, VIDIOC_G_EXT_CTRLS, &v);
return v;
}
bool
mp_camera_control_try_bool(MPCamera *camera, uint32_t id, bool *v)
{
int32_t value = *v;
bool s = control_impl_int32(camera, id, VIDIOC_TRY_EXT_CTRLS, &value);
*v = value;
return s;
}
bool
mp_camera_control_set_bool(MPCamera *camera, uint32_t id, bool v)
{
int32_t value = v;
return control_impl_int32(camera, id, VIDIOC_S_EXT_CTRLS, &value);
}
bool
mp_camera_control_get_bool(MPCamera *camera, uint32_t id)
{
int32_t v = false;
control_impl_int32(camera, id, VIDIOC_G_EXT_CTRLS, &v);
return v;
}
pid_t
mp_camera_control_set_bool_bg(MPCamera *camera, uint32_t id, bool v)
{
int32_t value = v;
return mp_camera_control_set_int32_bg(camera, id, value);
}
megapixels-1.4.3/src/camera.h 0000664 0000000 0000000 00000010573 14155633267 0016067 0 ustar 00root root 0000000 0000000 #pragma once
#include <linux/videodev2.h>
#include <stdbool.h>
#include <stdint.h>
#include <sys/types.h>
typedef enum {
MP_PIXEL_FMT_UNSUPPORTED,
MP_PIXEL_FMT_BGGR8,
MP_PIXEL_FMT_GBRG8,
MP_PIXEL_FMT_GRBG8,
MP_PIXEL_FMT_RGGB8,
MP_PIXEL_FMT_BGGR10P,
MP_PIXEL_FMT_GBRG10P,
MP_PIXEL_FMT_GRBG10P,
MP_PIXEL_FMT_RGGB10P,
MP_PIXEL_FMT_UYVY,
MP_PIXEL_FMT_YUYV,
MP_PIXEL_FMT_MAX,
} MPPixelFormat;
const char *mp_pixel_format_to_str(MPPixelFormat pixel_format);
MPPixelFormat mp_pixel_format_from_str(const char *str);
MPPixelFormat mp_pixel_format_from_v4l_pixel_format(uint32_t v4l_pixel_format);
MPPixelFormat mp_pixel_format_from_v4l_bus_code(uint32_t v4l_bus_code);
uint32_t mp_pixel_format_to_v4l_pixel_format(MPPixelFormat pixel_format);
uint32_t mp_pixel_format_to_v4l_bus_code(MPPixelFormat pixel_format);
uint32_t mp_pixel_format_bits_per_pixel(MPPixelFormat pixel_format);
uint32_t mp_pixel_format_pixel_depth(MPPixelFormat pixel_format);
uint32_t mp_pixel_format_width_to_bytes(MPPixelFormat pixel_format, uint32_t width);
uint32_t mp_pixel_format_width_to_colors(MPPixelFormat pixel_format, uint32_t width);
uint32_t mp_pixel_format_height_to_colors(MPPixelFormat pixel_format,
uint32_t height);
typedef struct {
MPPixelFormat pixel_format;
struct v4l2_fract frame_interval;
uint32_t width;
uint32_t height;
} MPCameraMode;
bool mp_camera_mode_is_equivalent(const MPCameraMode *m1, const MPCameraMode *m2);
typedef struct {
uint32_t index;
uint8_t *data;
int fd;
} MPBuffer;
typedef struct _MPCamera MPCamera;
MPCamera *mp_camera_new(int video_fd, int subdev_fd);
void mp_camera_free(MPCamera *camera);
void mp_camera_add_bg_task(MPCamera *camera, pid_t pid);
void mp_camera_wait_bg_tasks(MPCamera *camera);
bool mp_camera_check_task_complete(MPCamera *camera, pid_t pid);
bool mp_camera_is_subdev(MPCamera *camera);
int mp_camera_get_video_fd(MPCamera *camera);
int mp_camera_get_subdev_fd(MPCamera *camera);
const MPCameraMode *mp_camera_get_mode(const MPCamera *camera);
bool mp_camera_try_mode(MPCamera *camera, MPCameraMode *mode);
bool mp_camera_set_mode(MPCamera *camera, MPCameraMode *mode);
bool mp_camera_start_capture(MPCamera *camera);
bool mp_camera_stop_capture(MPCamera *camera);
bool mp_camera_is_capturing(MPCamera *camera);
bool mp_camera_capture_buffer(MPCamera *camera, MPBuffer *buffer);
bool mp_camera_release_buffer(MPCamera *camera, uint32_t buffer_index);
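/*
 * Minimal capture-loop sketch (illustrative only; error handling is trimmed
 * and "cam"/"mode" are assumed to be set up by the caller):
 *
 *     mp_camera_set_mode(cam, &mode);
 *     mp_camera_start_capture(cam);
 *     for (int i = 0; i < 10; ++i) {
 *             MPBuffer buffer;
 *             if (!mp_camera_capture_buffer(cam, &buffer))
 *                     break;
 *             // buffer.data holds the frame; hand the slot back when done
 *             mp_camera_release_buffer(cam, buffer.index);
 *     }
 *     mp_camera_stop_capture(cam);
 */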
typedef struct _MPCameraModeList MPCameraModeList;
MPCameraModeList *mp_camera_list_supported_modes(MPCamera *camera);
MPCameraModeList *mp_camera_list_available_modes(MPCamera *camera);
MPCameraMode *mp_camera_mode_list_get(MPCameraModeList *list);
MPCameraModeList *mp_camera_mode_list_next(MPCameraModeList *list);
void mp_camera_mode_list_free(MPCameraModeList *list);
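/*
 * Minimal mode-list sketch (illustrative only; "cam" is assumed to come from
 * mp_camera_new() and <stdio.h> provides printf):
 *
 *     MPCameraModeList *modes = mp_camera_list_available_modes(cam);
 *     for (MPCameraModeList *it = modes; it; it = mp_camera_mode_list_next(it)) {
 *             MPCameraMode *m = mp_camera_mode_list_get(it);
 *             printf("%ux%u %s\n",
 *                    m->width,
 *                    m->height,
 *                    mp_pixel_format_to_str(m->pixel_format));
 *     }
 *     mp_camera_mode_list_free(modes);
 */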
typedef struct {
uint32_t id;
uint32_t type;
char name[32];
int32_t min;
int32_t max;
int32_t step;
int32_t default_value;
uint32_t flags;
uint32_t element_size;
uint32_t element_count;
uint32_t dimensions_count;
uint32_t dimensions[V4L2_CTRL_MAX_DIMS];
} MPControl;
const char *mp_control_id_to_str(uint32_t id);
const char *mp_control_type_to_str(uint32_t type);
typedef struct _MPControlList MPControlList;
MPControlList *mp_camera_list_controls(MPCamera *camera);
MPControl *mp_control_list_get(MPControlList *list);
MPControlList *mp_control_list_next(MPControlList *list);
void mp_control_list_free(MPControlList *list);
bool mp_camera_query_control(MPCamera *camera, uint32_t id, MPControl *control);
bool mp_camera_control_try_int32(MPCamera *camera, uint32_t id, int32_t *v);
bool mp_camera_control_set_int32(MPCamera *camera, uint32_t id, int32_t v);
int32_t mp_camera_control_get_int32(MPCamera *camera, uint32_t id);
// set the value in the background, discards result
pid_t mp_camera_control_set_int32_bg(MPCamera *camera, uint32_t id, int32_t v);
bool mp_camera_control_try_bool(MPCamera *camera, uint32_t id, bool *v);
bool mp_camera_control_set_bool(MPCamera *camera, uint32_t id, bool v);
bool mp_camera_control_get_bool(MPCamera *camera, uint32_t id);
// set the value in the background, discards result
pid_t mp_camera_control_set_bool_bg(MPCamera *camera, uint32_t id, bool v);
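/*
 * Minimal control sketch (illustrative only; assumes the sensor exposes
 * V4L2_CID_GAIN and that the caller handles errors):
 *
 *     MPControlList *controls = mp_camera_list_controls(cam);
 *     for (MPControlList *it = controls; it; it = mp_control_list_next(it)) {
 *             MPControl *c = mp_control_list_get(it);
 *             printf("%s (%s) [%d..%d]\n",
 *                    mp_control_id_to_str(c->id),
 *                    mp_control_type_to_str(c->type),
 *                    c->min,
 *                    c->max);
 *     }
 *     mp_control_list_free(controls);
 *
 *     MPControl gain;
 *     if (mp_camera_query_control(cam, V4L2_CID_GAIN, &gain))
 *             mp_camera_control_set_int32(cam, V4L2_CID_GAIN, gain.default_value);
 *
 * The *_bg variants fork and apply the value asynchronously; call
 * mp_camera_wait_bg_tasks() before freeing the camera so the children are
 * reaped.
 */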
megapixels-1.4.3/src/camera_config.c 0000664 0000000 0000000 00000026230 14155633267 0017404 0 ustar 00root root 0000000 0000000 #include "camera_config.h"
#include "config.h"
#include "ini.h"
#include "matrix.h"
#include <assert.h>
#include <glib.h>
#include <limits.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <unistd.h>
static struct mp_camera_config cameras[MP_MAX_CAMERAS];
static size_t num_cameras = 0;
static char *exif_make;
static char *exif_model;
static bool
find_config(char *conffile)
{
char buf[512];
FILE *fp;
if (access("/proc/device-tree/compatible", F_OK) != -1) {
// Read the compatible string of the current device tree; it looks like:
// pine64,pinephone-1.2\0allwinner,sun50i-a64\0
fp = fopen("/proc/device-tree/compatible", "r");
fgets(buf, 512, fp);
fclose(fp);
// Check config/$dt.ini in the current working directory
sprintf(conffile, "config/%s.ini", buf);
if (access(conffile, F_OK) != -1) {
printf("Found config file at %s\n", conffile);
return true;
}
// Check for a config file in XDG_CONFIG_HOME
sprintf(conffile,
"%s/megapixels/config/%s.ini",
g_get_user_config_dir(),
buf);
if (access(conffile, F_OK) != -1) {
printf("Found config file at %s\n", conffile);
return true;
}
// Check user overridden /etc/megapixels/config/$dt.ini
sprintf(conffile, "%s/megapixels/config/%s.ini", SYSCONFDIR, buf);
if (access(conffile, F_OK) != -1) {
printf("Found config file at %s\n", conffile);
return true;
}
// Check packaged /usr/share/megapixels/config/$dt.ini
sprintf(conffile, "%s/megapixels/config/%s.ini", DATADIR, buf);
if (access(conffile, F_OK) != -1) {
printf("Found config file at %s\n", conffile);
return true;
}
printf("%s not found\n", conffile);
} else {
printf("Could not read device name from device tree\n");
}
// If all else fails, fall back to /etc/megapixels.ini
sprintf(conffile, "/etc/megapixels.ini");
if (access(conffile, F_OK) != -1) {
printf("Found config file at %s\n", conffile);
return true;
}
return false;
}
static int
strtoint(const char *nptr, char **endptr, int base)
{
long x = strtol(nptr, endptr, base);
assert(x <= INT_MAX);
return (int)x;
}
static bool
config_handle_camera_mode(const char *prefix,
MPCameraMode *mode,
const char *name,
const char *value)
{
int prefix_length = strlen(prefix);
if (strncmp(prefix, name, prefix_length) != 0)
return false;
name += prefix_length;
if (strcmp(name, "width") == 0) {
mode->width = strtoint(value, NULL, 10);
} else if (strcmp(name, "height") == 0) {
mode->height = strtoint(value, NULL, 10);
} else if (strcmp(name, "rate") == 0) {
mode->frame_interval.numerator = 1;
mode->frame_interval.denominator = strtoint(value, NULL, 10);
} else if (strcmp(name, "fmt") == 0) {
mode->pixel_format = mp_pixel_format_from_str(value);
if (mode->pixel_format == MP_PIXEL_FMT_UNSUPPORTED) {
g_printerr("Unsupported pixelformat %s\n", value);
exit(1);
}
} else {
return false;
}
return true;
}
static int
config_ini_handler(void *user,
const char *section,
const char *name,
const char *value)
{
if (strcmp(section, "device") == 0) {
if (strcmp(name, "make") == 0) {
exif_make = strdup(value);
} else if (strcmp(name, "model") == 0) {
exif_model = strdup(value);
} else {
g_printerr("Unknown key '%s' in [device]\n", name);
exit(1);
}
} else {
if (num_cameras == MP_MAX_CAMERAS) {
g_printerr("More cameras defined than NUM_CAMERAS\n");
exit(1);
}
size_t index = 0;
for (; index < num_cameras; ++index) {
if (strcmp(cameras[index].cfg_name, section) == 0) {
break;
}
}
if (index == num_cameras) {
printf("Adding camera %s from config\n", section);
++num_cameras;
cameras[index].index = index;
strcpy(cameras[index].cfg_name, section);
}
struct mp_camera_config *cc = &cameras[index];
if (config_handle_camera_mode(
"capture-", &cc->capture_mode, name, value)) {
} else if (config_handle_camera_mode(
"preview-", &cc->preview_mode, name, value)) {
} else if (strcmp(name, "rotate") == 0) {
cc->rotate = strtoint(value, NULL, 10);
} else if (strcmp(name, "mirrored") == 0) {
cc->mirrored = strcmp(value, "true") == 0;
} else if (strcmp(name, "driver") == 0) {
strcpy(cc->dev_name, value);
} else if (strcmp(name, "media-driver") == 0) {
strcpy(cc->media_dev_name, value);
} else if (strcmp(name, "media-links") == 0) {
char **linkdefs = g_strsplit(value, ",", 0);
for (int i = 0; i < MP_MAX_LINKS && linkdefs[i] != NULL;
++i) {
char **linkdef = g_strsplit(linkdefs[i], "->", 2);
char **porta = g_strsplit(linkdef[0], ":", 2);
char **portb = g_strsplit(linkdef[1], ":", 2);
strcpy(cc->media_links[i].source_name, porta[0]);
strcpy(cc->media_links[i].target_name, portb[0]);
cc->media_links[i].source_port =
strtoint(porta[1], NULL, 10);
cc->media_links[i].target_port =
strtoint(portb[1], NULL, 10);
g_strfreev(portb);
g_strfreev(porta);
g_strfreev(linkdef);
++cc->num_media_links;
}
g_strfreev(linkdefs);
} else if (strcmp(name, "colormatrix") == 0) {
sscanf(value,
"%f,%f,%f,%f,%f,%f,%f,%f,%f",
cc->colormatrix + 0,
cc->colormatrix + 1,
cc->colormatrix + 2,
cc->colormatrix + 3,
cc->colormatrix + 4,
cc->colormatrix + 5,
cc->colormatrix + 6,
cc->colormatrix + 7,
cc->colormatrix + 8);
} else if (strcmp(name, "forwardmatrix") == 0) {
sscanf(value,
"%f,%f,%f,%f,%f,%f,%f,%f,%f",
cc->forwardmatrix + 0,
cc->forwardmatrix + 1,
cc->forwardmatrix + 2,
cc->forwardmatrix + 3,
cc->forwardmatrix + 4,
cc->forwardmatrix + 5,
cc->forwardmatrix + 6,
cc->forwardmatrix + 7,
cc->forwardmatrix + 8);
} else if (strcmp(name, "whitelevel") == 0) {
cc->whitelevel = strtoint(value, NULL, 10);
} else if (strcmp(name, "blacklevel") == 0) {
cc->blacklevel = strtoint(value, NULL, 10);
} else if (strcmp(name, "focallength") == 0) {
cc->focallength = strtof(value, NULL);
} else if (strcmp(name, "cropfactor") == 0) {
cc->cropfactor = strtof(value, NULL);
} else if (strcmp(name, "fnumber") == 0) {
cc->fnumber = strtod(value, NULL);
} else if (strcmp(name, "iso-min") == 0) {
cc->iso_min = strtod(value, NULL);
} else if (strcmp(name, "iso-max") == 0) {
cc->iso_max = strtod(value, NULL);
} else if (strcmp(name, "flash-path") == 0) {
strcpy(cc->flash_path, value);
cc->has_flash = true;
} else if (strcmp(name, "flash-display") == 0) {
cc->flash_display = strcmp(value, "true") == 0;
if (cc->flash_display) {
cc->has_flash = true;
}
} else {
g_printerr("Unknown key '%s' in [%s]\n", name, section);
exit(1);
}
}
return 1;
}
void
calculate_matrices()
{
for (size_t i = 0; i < num_cameras; ++i) {
// colormatrix and forwardmatrix are embedded arrays and can never be
// NULL; treat a zeroed first element as "matrix not configured" instead
if (cameras[i].colormatrix[0] != 0.0f &&
cameras[i].forwardmatrix[0] != 0.0f) {
multiply_matrices(cameras[i].colormatrix,
cameras[i].forwardmatrix,
cameras[i].previewmatrix);
}
}
}
bool
mp_load_config()
{
char file[512];
if (!find_config(file)) {
g_printerr("Could not find any config file\n");
return false;
}
int result = ini_parse(file, config_ini_handler, NULL);
if (result == -1) {
g_printerr("Config file not found\n");
return false;
}
if (result == -2) {
g_printerr("Could not allocate memory to parse config file\n");
return false;
}
if (result != 0) {
g_printerr("Could not parse config file\n");
return false;
}
calculate_matrices();
return true;
}
const char *
mp_get_device_make()
{
return exif_make;
}
const char *
mp_get_device_model()
{
return exif_model;
}
const struct mp_camera_config *
mp_get_camera_config(size_t index)
{
if (index >= num_cameras)
return NULL;
return &cameras[index];
}
megapixels-1.4.3/src/camera_config.h 0000664 0000000 0000000 00000002151 14155633267 0017405 0 ustar 00root root 0000000 0000000 #pragma once
#include "camera.h"
#include <stdbool.h>
#include <stddef.h>
#define MP_MAX_CAMERAS 5
#define MP_MAX_LINKS 10
struct mp_media_link_config {
char source_name[100];
char target_name[100];
int source_port;
int target_port;
};
struct mp_camera_config {
size_t index;
char cfg_name[100];
char dev_name[260];
char media_dev_name[260];
MPCameraMode capture_mode;
MPCameraMode preview_mode;
int rotate;
bool mirrored;
struct mp_media_link_config media_links[MP_MAX_LINKS];
int num_media_links;
float colormatrix[9];
float forwardmatrix[9];
float previewmatrix[9];
int blacklevel;
int whitelevel;
float focallength;
float cropfactor;
double fnumber;
int iso_min;
int iso_max;
char flash_path[260];
bool flash_display;
bool has_flash;
};
bool mp_load_config();
const char *mp_get_device_make();
const char *mp_get_device_model();
const struct mp_camera_config *mp_get_camera_config(size_t index);
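/*
 * Minimal usage sketch (illustrative only): load the device configuration and
 * walk the cameras it defines.
 *
 *     if (!mp_load_config())
 *             return 1;
 *     for (size_t i = 0; i < MP_MAX_CAMERAS; ++i) {
 *             const struct mp_camera_config *cfg = mp_get_camera_config(i);
 *             if (!cfg)
 *                     break;
 *             printf("[%s] sensor %s on %s\n",
 *                    cfg->cfg_name,
 *                    cfg->dev_name,
 *                    cfg->media_dev_name);
 *     }
 */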
megapixels-1.4.3/src/device.c 0000664 0000000 0000000 00000030407 14155633267 0016067 0 ustar 00root root 0000000 0000000 #include "device.h"
#include <dirent.h>
#include <errno.h>
#include <fcntl.h>
#include <glib.h>
#include <linux/media.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <sys/ioctl.h>
#include <unistd.h>
bool
mp_find_device_path(struct media_v2_intf_devnode devnode, char *path, int length)
{
char uevent_path[256];
snprintf(uevent_path,
256,
"/sys/dev/char/%d:%d/uevent",
devnode.major,
devnode.minor);
FILE *f = fopen(uevent_path, "r");
if (!f) {
return false;
}
char line[512];
while (fgets(line, 512, f)) {
if (strncmp(line, "DEVNAME=", 8) == 0) {
// Drop the trailing newline
int line_length = strlen(line);
if (line[line_length - 1] == '\n')
line[line_length - 1] = '\0';
// Bound the copy by the caller-provided buffer size, not by the
// length of the uevent line
snprintf(path, length, "/dev/%s", line + 8);
fclose(f);
return true;
}
}
fclose(f);
return false;
}
struct _MPDevice {
int fd;
struct media_device_info info;
struct media_v2_entity *entities;
size_t num_entities;
struct media_v2_interface *interfaces;
size_t num_interfaces;
struct media_v2_pad *pads;
size_t num_pads;
struct media_v2_link *links;
size_t num_links;
};
static void
errno_printerr(const char *s)
{
g_printerr("MPDevice: %s error %d, %s\n", s, errno, strerror(errno));
}
static int
xioctl(int fd, int request, void *arg)
{
int r;
do {
r = ioctl(fd, request, arg);
} while (r == -1 && errno == EINTR);
return r;
}
MPDevice *
mp_device_find(const char *driver_name)
{
MPDeviceList *list = mp_device_list_new();
MPDevice *found_device = mp_device_list_find_remove(&list, driver_name);
mp_device_list_free(list);
return found_device;
}
MPDevice *
mp_device_open(const char *path)
{
int fd = open(path, O_RDWR);
if (fd == -1) {
errno_printerr("open");
return NULL;
}
return mp_device_new(fd);
}
MPDevice *
mp_device_new(int fd)
{
// Get the topology of the media device
struct media_v2_topology topology = {};
if (xioctl(fd, MEDIA_IOC_G_TOPOLOGY, &topology) == -1 ||
topology.num_entities == 0) {
close(fd);
return NULL;
}
// Create the device
MPDevice *device = calloc(1, sizeof(MPDevice));
device->fd = fd;
device->entities =
calloc(topology.num_entities, sizeof(struct media_v2_entity));
device->num_entities = topology.num_entities;
device->interfaces =
calloc(topology.num_interfaces, sizeof(struct media_v2_interface));
device->num_interfaces = topology.num_interfaces;
device->pads = calloc(topology.num_pads, sizeof(struct media_v2_pad));
device->num_pads = topology.num_pads;
device->links = calloc(topology.num_links, sizeof(struct media_v2_link));
device->num_links = topology.num_links;
// Get the actual devices and interfaces
topology.ptr_entities = (uint64_t)device->entities;
topology.ptr_interfaces = (uint64_t)device->interfaces;
topology.ptr_pads = (uint64_t)device->pads;
topology.ptr_links = (uint64_t)device->links;
if (xioctl(fd, MEDIA_IOC_G_TOPOLOGY, &topology) == -1) {
errno_printerr("MEDIA_IOC_G_TOPOLOGY");
mp_device_close(device);
return NULL;
}
// Get device info
if (xioctl(fd, MEDIA_IOC_DEVICE_INFO, &device->info) == -1) {
errno_printerr("MEDIA_IOC_DEVICE_INFO");
mp_device_close(device);
return NULL;
}
return device;
}
void
mp_device_close(MPDevice *device)
{
close(device->fd);
free(device->entities);
free(device->interfaces);
free(device->pads);
free(device->links);
free(device);
}
bool
mp_device_setup_link(MPDevice *device,
uint32_t source_pad_id,
uint32_t sink_pad_id,
bool enabled)
{
const struct media_v2_pad *source_pad =
mp_device_get_pad(device, source_pad_id);
g_return_val_if_fail(source_pad, false);
const struct media_v2_pad *sink_pad = mp_device_get_pad(device, sink_pad_id);
g_return_val_if_fail(sink_pad, false);
struct media_link_desc link = {};
link.flags = enabled ? MEDIA_LNK_FL_ENABLED : 0;
link.source.entity = source_pad->entity_id;
link.source.index = 0;
link.sink.entity = sink_pad->entity_id;
link.sink.index = 0;
if (xioctl(device->fd, MEDIA_IOC_SETUP_LINK, &link) == -1) {
errno_printerr("MEDIA_IOC_SETUP_LINK");
return false;
}
return true;
}
const struct media_v2_entity *
mp_device_find_entity(const MPDevice *device, const char *driver_name)
{
int length = strlen(driver_name);
// Find the entity from the name
for (uint32_t i = 0; i < device->num_entities; ++i) {
if (strncmp(device->entities[i].name, driver_name, length) == 0) {
return &device->entities[i];
}
}
return NULL;
}
const struct media_v2_entity *
mp_device_find_entity_type(const MPDevice *device, const uint32_t type)
{
// Find the entity from the entity type
for (uint32_t i = 0; i < device->num_entities; ++i) {
if (device->entities[i].function == type) {
return &device->entities[i];
}
}
return NULL;
}
const struct media_device_info *
mp_device_get_info(const MPDevice *device)
{
return &device->info;
}
const struct media_v2_entity *
mp_device_get_entity(const MPDevice *device, uint32_t id)
{
for (int i = 0; i < device->num_entities; ++i) {
if (device->entities[i].id == id) {
return &device->entities[i];
}
}
return NULL;
}
const struct media_v2_entity *
mp_device_get_entities(const MPDevice *device)
{
return device->entities;
}
size_t
mp_device_get_num_entities(const MPDevice *device)
{
return device->num_entities;
}
const struct media_v2_interface *
mp_device_find_entity_interface(const MPDevice *device, uint32_t entity_id)
{
// Find the interface through the link
const struct media_v2_link *link = mp_device_find_link_to(device, entity_id);
if (!link) {
return NULL;
}
return mp_device_get_interface(device, link->source_id);
}
const struct media_v2_interface *
mp_device_get_interface(const MPDevice *device, uint32_t id)
{
for (int i = 0; i < device->num_interfaces; ++i) {
if (device->interfaces[i].id == id) {
return &device->interfaces[i];
}
}
return NULL;
}
const struct media_v2_interface *
mp_device_get_interfaces(const MPDevice *device)
{
return device->interfaces;
}
size_t
mp_device_get_num_interfaces(const MPDevice *device)
{
return device->num_interfaces;
}
const struct media_v2_pad *
mp_device_get_pad_from_entity(const MPDevice *device, uint32_t entity_id)
{
for (int i = 0; i < device->num_pads; ++i) {
if (device->pads[i].entity_id == entity_id) {
return &device->pads[i];
}
}
return NULL;
}
const struct media_v2_pad *
mp_device_get_pad(const MPDevice *device, uint32_t id)
{
for (int i = 0; i < device->num_pads; ++i) {
if (device->pads[i].id == id) {
return &device->pads[i];
}
}
return NULL;
}
const struct media_v2_pad *
mp_device_get_pads(const MPDevice *device)
{
return device->pads;
}
size_t
mp_device_get_num_pads(const MPDevice *device)
{
return device->num_pads;
}
const struct media_v2_link *
mp_device_find_entity_link(const MPDevice *device, uint32_t entity_id)
{
const struct media_v2_pad *pad =
mp_device_get_pad_from_entity(device, entity_id);
const struct media_v2_link *link = mp_device_find_link_to(device, pad->id);
if (link) {
return link;
}
return mp_device_find_link_from(device, pad->id);
}
const struct media_v2_link *
mp_device_find_link_from(const MPDevice *device, uint32_t source)
{
for (int i = 0; i < device->num_links; ++i) {
if (device->links[i].source_id == source) {
return &device->links[i];
}
}
return NULL;
}
const struct media_v2_link *
mp_device_find_link_to(const MPDevice *device, uint32_t sink)
{
for (int i = 0; i < device->num_links; ++i) {
if (device->links[i].sink_id == sink) {
return &device->links[i];
}
}
return NULL;
}
const struct media_v2_link *
mp_device_find_link_between(const MPDevice *device, uint32_t source, uint32_t sink)
{
for (int i = 0; i < device->num_links; ++i) {
if (device->links[i].source_id == source &&
device->links[i].sink_id == sink) {
return &device->links[i];
}
}
return NULL;
}
const struct media_v2_link *
mp_device_get_link(const MPDevice *device, uint32_t id)
{
for (int i = 0; i < device->num_links; ++i) {
if (device->links[i].id == id) {
return &device->links[i];
}
}
return NULL;
}
const struct media_v2_link *
mp_device_get_links(const MPDevice *device)
{
return device->links;
}
size_t
mp_device_get_num_links(const MPDevice *device)
{
return device->num_links;
}
struct _MPDeviceList {
MPDevice *device;
MPDeviceList *next;
};
MPDeviceList *
mp_device_list_new()
{
MPDeviceList *current = NULL;
// Enumerate media device files
struct dirent *dir;
DIR *d = opendir("/dev");
while ((dir = readdir(d)) != NULL) {
if (strncmp(dir->d_name, "media", 5) == 0) {
char path[261];
snprintf(path, 261, "/dev/%s", dir->d_name);
MPDevice *device = mp_device_open(path);
if (device) {
MPDeviceList *next = malloc(sizeof(MPDeviceList));
next->device = device;
next->next = current;
current = next;
}
}
}
closedir(d);
return current;
}
void
mp_device_list_free(MPDeviceList *device_list)
{
while (device_list) {
MPDeviceList *tmp = device_list;
device_list = tmp->next;
mp_device_close(tmp->device);
free(tmp);
}
}
MPDevice *
mp_device_list_find_remove(MPDeviceList **list, const char *driver_name)
{
MPDevice *found_device = NULL;
int length = strlen(driver_name);
while (*list) {
MPDevice *device = mp_device_list_get(*list);
const struct media_device_info *info = mp_device_get_info(device);
if (strncmp(info->driver, driver_name, length) == 0) {
found_device = mp_device_list_remove(list);
break;
}
list = &(*list)->next;
}
return found_device;
}
MPDevice *
mp_device_list_remove(MPDeviceList **device_list)
{
MPDevice *device = (*device_list)->device;
if ((*device_list)->next) {
MPDeviceList *tmp = (*device_list)->next;
**device_list = *tmp;
free(tmp);
} else {
free(*device_list);
*device_list = NULL;
}
return device;
}
MPDevice *
mp_device_list_get(const MPDeviceList *device_list)
{
return device_list->device;
}
MPDeviceList *
mp_device_list_next(const MPDeviceList *device_list)
{
return device_list->next;
}
megapixels-1.4.3/src/device.h 0000664 0000000 0000000 00000006262 14155633267 0016076 0 ustar 00root root 0000000 0000000 #pragma once
#include <linux/media.h>
#include <stdbool.h>
#include <stddef.h>
#include <stdint.h>
bool
mp_find_device_path(struct media_v2_intf_devnode devnode, char *path, int length);
typedef struct _MPDevice MPDevice;
MPDevice *mp_device_find(const char *driver_name);
MPDevice *mp_device_open(const char *path);
MPDevice *mp_device_new(int fd);
void mp_device_close(MPDevice *device);
bool mp_device_setup_link(MPDevice *device,
uint32_t source_pad_id,
uint32_t sink_pad_id,
bool enabled);
const struct media_device_info *mp_device_get_info(const MPDevice *device);
const struct media_v2_entity *mp_device_find_entity(const MPDevice *device,
const char *driver_name);
const struct media_v2_entity *mp_device_find_entity_type(const MPDevice *device,
const uint32_t type);
const struct media_v2_entity *mp_device_get_entity(const MPDevice *device,
uint32_t id);
const struct media_v2_entity *mp_device_get_entities(const MPDevice *device);
size_t mp_device_get_num_entities(const MPDevice *device);
const struct media_v2_interface *
mp_device_find_entity_interface(const MPDevice *device, uint32_t entity_id);
const struct media_v2_interface *mp_device_get_interface(const MPDevice *device,
uint32_t id);
const struct media_v2_interface *mp_device_get_interfaces(const MPDevice *device);
size_t mp_device_get_num_interfaces(const MPDevice *device);
const struct media_v2_pad *mp_device_get_pad_from_entity(const MPDevice *device,
uint32_t entity_id);
const struct media_v2_pad *mp_device_get_pad(const MPDevice *device, uint32_t id);
const struct media_v2_pad *mp_device_get_pads(const MPDevice *device);
size_t mp_device_get_num_pads(const MPDevice *device);
const struct media_v2_link *mp_device_find_entity_link(const MPDevice *device,
uint32_t entity_id);
const struct media_v2_link *mp_device_find_link_from(const MPDevice *device,
uint32_t source);
const struct media_v2_link *mp_device_find_link_to(const MPDevice *device,
uint32_t sink);
const struct media_v2_link *
mp_device_find_link_between(const MPDevice *device, uint32_t source, uint32_t sink);
const struct media_v2_link *mp_device_get_link(const MPDevice *device, uint32_t id);
const struct media_v2_link *mp_device_get_links(const MPDevice *device);
size_t mp_device_get_num_links(const MPDevice *device);
typedef struct _MPDeviceList MPDeviceList;
MPDeviceList *mp_device_list_new();
void mp_device_list_free(MPDeviceList *device_list);
MPDevice *mp_device_list_find_remove(MPDeviceList **device_list,
const char *driver_name);
MPDevice *mp_device_list_remove(MPDeviceList **device_list);
MPDevice *mp_device_list_get(const MPDeviceList *device_list);
MPDeviceList *mp_device_list_next(const MPDeviceList *device_list);
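/*
 * Minimal usage sketch (illustrative only; "sun6i-csi" is an example driver
 * name, not a requirement of this API):
 *
 *     MPDevice *dev = mp_device_find("sun6i-csi");
 *     if (dev) {
 *             const struct media_device_info *info = mp_device_get_info(dev);
 *             printf("found %s (driver %s)\n", info->model, info->driver);
 *             mp_device_close(dev);
 *     }
 *
 * mp_device_find() hands ownership of the device to the caller, which must
 * release it with mp_device_close().
 */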
megapixels-1.4.3/src/flash.c 0000664 0000000 0000000 00000013575 14155633267 0015734 0 ustar 00root root 0000000 0000000 #include "flash.h"
#include "gtk/gtk.h"
#include <fcntl.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <unistd.h>
typedef enum {
FLASH_TYPE_LED,
FLASH_TYPE_DISPLAY,
} FlashType;
typedef struct {
char path[260];
int fd;
} MPLEDFlash;
typedef struct {
} MPDisplayFlash;
struct _MPFlash {
FlashType type;
union {
MPLEDFlash led;
MPDisplayFlash display;
};
};
MPFlash *
mp_led_flash_from_path(const char *path)
{
MPFlash *flash = malloc(sizeof(MPFlash));
flash->type = FLASH_TYPE_LED;
strncpy(flash->led.path, path, 259);
flash->led.path[259] = '\0';
char mpath[275];
snprintf(mpath, 275, "%s/flash_strobe", path);
flash->led.fd = open(mpath, O_WRONLY);
if (flash->led.fd == -1) {
g_printerr("Failed to open %s\n", mpath);
free(flash);
return NULL;
}
return flash;
}
static GtkWidget *flash_window = NULL;
static GDBusProxy *dbus_brightness_proxy = NULL;
static int dbus_old_brightness = 0;
static void
dbus_brightness_init(GObject *src, GAsyncResult *res, gpointer *user_data)
{
g_autoptr(GError) err = NULL;
dbus_brightness_proxy = g_dbus_proxy_new_finish(res, &err);
if (!dbus_brightness_proxy || err) {
printf("Failed to connect to dbus brightness service %s\n",
err->message);
return;
}
}
void
mp_flash_gtk_init(GDBusConnection *conn)
{
g_dbus_proxy_new(conn,
G_DBUS_PROXY_FLAGS_NONE,
NULL,
"org.gnome.SettingsDaemon.Power",
"/org/gnome/SettingsDaemon/Power",
"org.gnome.SettingsDaemon.Power.Screen",
NULL,
(GAsyncReadyCallback)dbus_brightness_init,
NULL);
// Create a full screen full white window as a flash
GtkWidget *window = gtk_window_new();
// gtk_window_set_accept_focus(GTK_WINDOW(flash->display.window), FALSE);
gtk_window_set_decorated(GTK_WINDOW(window), FALSE);
gtk_window_fullscreen(GTK_WINDOW(window));
GtkStyleContext *context;
context = gtk_widget_get_style_context(window);
gtk_style_context_add_class(context, "flash");
flash_window = window;
}
void
mp_flash_gtk_clean()
{
gtk_window_destroy(GTK_WINDOW(flash_window));
g_object_unref(dbus_brightness_proxy);
}
MPFlash *
mp_create_display_flash()
{
MPFlash *flash = malloc(sizeof(MPFlash));
flash->type = FLASH_TYPE_DISPLAY;
return flash;
}
void
mp_flash_free(MPFlash *flash)
{
switch (flash->type) {
case FLASH_TYPE_LED:
close(flash->led.fd);
break;
case FLASH_TYPE_DISPLAY:
break;
}
free(flash);
}
static void
set_display_brightness(int brightness)
{
g_dbus_proxy_call(dbus_brightness_proxy,
"org.freedesktop.DBus.Properties.Set",
g_variant_new("(ssv)",
"org.gnome.SettingsDaemon.Power.Screen",
"Brightness",
g_variant_new("i", brightness)),
G_DBUS_CALL_FLAGS_NONE,
-1,
NULL,
NULL,
NULL);
}
static void
brightness_received(GDBusProxy *proxy, GAsyncResult *res, gpointer user_data)
{
g_autoptr(GError) error = NULL;
g_autoptr(GVariant) result = g_dbus_proxy_call_finish(proxy, res, &error);
if (!result) {
printf("Failed to get display brightness: %s\n", error->message);
return;
}
g_autoptr(GVariant) values = g_variant_get_child_value(result, 0);
if (g_variant_n_children(values) == 0) {
return;
}
g_autoptr(GVariant) brightness = g_variant_get_child_value(values, 0);
dbus_old_brightness = g_variant_get_int32(brightness);
}
static bool
show_display_flash(MPFlash *flash)
{
if (!flash_window)
return false;
gtk_widget_show(flash_window);
// First get brightness and then set brightness to 100%
if (!dbus_brightness_proxy)
return false;
g_dbus_proxy_call(dbus_brightness_proxy,
"org.freedesktop.DBus.Properties.Get",
g_variant_new("(ss)",
"org.gnome.SettingsDaemon.Power.Screen",
"Brightness"),
G_DBUS_CALL_FLAGS_NONE,
-1,
NULL,
(GAsyncReadyCallback)brightness_received,
NULL);
set_display_brightness(100);
return false;
}
void
mp_flash_enable(MPFlash *flash)
{
switch (flash->type) {
case FLASH_TYPE_LED:
lseek(flash->led.fd, 0, SEEK_SET);
dprintf(flash->led.fd, "1\n");
break;
case FLASH_TYPE_DISPLAY:
g_main_context_invoke(NULL, (GSourceFunc)show_display_flash, flash);
break;
}
}
static bool
hide_display_flash(MPFlash *flash)
{
if (!flash_window)
return false;
gtk_widget_hide(flash_window);
set_display_brightness(dbus_old_brightness);
return false;
}
void
mp_flash_disable(MPFlash *flash)
{
switch (flash->type) {
case FLASH_TYPE_LED:
// Flash gets reset automatically
break;
case FLASH_TYPE_DISPLAY:
g_main_context_invoke(NULL, (GSourceFunc)hide_display_flash, flash);
break;
}
}
megapixels-1.4.3/src/flash.h 0000664 0000000 0000000 00000000514 14155633267 0015726 0 ustar 00root root 0000000 0000000 #include "gio/gio.h"
typedef struct _MPFlash MPFlash;
void mp_flash_gtk_init(GDBusConnection *conn);
void mp_flash_gtk_clean();
MPFlash *mp_led_flash_from_path(const char *path);
MPFlash *mp_create_display_flash();
void mp_flash_free(MPFlash *flash);
void mp_flash_enable(MPFlash *flash);
void mp_flash_disable(MPFlash *flash);
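/*
 * Minimal usage sketch (illustrative only; the sysfs path is an example):
 *
 *     MPFlash *flash = mp_led_flash_from_path("/sys/class/leds/white:flash");
 *     if (flash) {
 *             mp_flash_enable(flash);
 *             // ... capture ...
 *             mp_flash_disable(flash);
 *             mp_flash_free(flash);
 *     }
 *
 * Display flashes (mp_create_display_flash()) additionally require
 * mp_flash_gtk_init() to have been called with a GDBusConnection so the
 * screen brightness can be driven over D-Bus.
 */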
megapixels-1.4.3/src/gl_util.c 0000664 0000000 0000000 00000015776 14155633267 0016303 0 ustar 00root root 0000000 0000000 #include "gl_util.h"
#include <assert.h>
#include <gdk/gdk.h>
#include <gio/gio.h>
#include <limits.h>
#include <signal.h>
#include <stdio.h>
#include <stdlib.h>
void
gl_util_check_error(const char *file, int line)
{
GLenum error = glGetError();
const char *name;
switch (error) {
case GL_NO_ERROR:
return; // no error
case GL_INVALID_ENUM:
name = "GL_INVALID_ENUM";
break;
case GL_INVALID_VALUE:
name = "GL_INVALID_VALUE";
break;
case GL_INVALID_OPERATION:
name = "GL_INVALID_OPERATION";
break;
case GL_INVALID_FRAMEBUFFER_OPERATION:
name = "GL_INVALID_FRAMEBUFFER_OPERATION";
break;
case GL_OUT_OF_MEMORY:
name = "GL_OUT_OF_MEMORY";
break;
default:
name = "UNKNOWN ERROR!";
break;
}
printf("GL error at %s:%d - %s\n", file, line, name);
// raise(SIGTRAP);
}
GLuint
gl_util_load_shader(const char *resource,
GLenum type,
const char **extra_sources,
size_t num_extra)
{
GdkGLContext *context = gdk_gl_context_get_current();
assert(context);
GLuint shader = glCreateShader(type);
if (shader == 0) {
return 0;
}
GBytes *bytes = g_resources_lookup_data(resource, 0, NULL);
if (!bytes) {
printf("Failed to load shader resource %s\n", resource);
return 0;
}
// Build #define for OpenGL context information
gboolean is_es = gdk_gl_context_get_use_es(context);
int major, minor;
gdk_gl_context_get_version(context, &major, &minor);
char context_info_buf[128];
snprintf(context_info_buf,
128,
"#define %s\n#define GL_%d\n#define GL_%d_%d\n",
is_es ? "GL_ES" : "GL_NO_ES",
major,
major,
minor);
gsize glib_size = 0;
const GLchar *source = g_bytes_get_data(bytes, &glib_size);
if (glib_size == 0 || glib_size > INT_MAX) {
printf("Invalid size for resource\n");
return 0;
}
const GLchar **sources = malloc((num_extra + 1) * sizeof(GLchar *));
GLint *sizes = malloc((num_extra + 1) * sizeof(GLint));
for (size_t i = 0; i < num_extra; ++i) {
sources[i] = extra_sources[i];
sizes[i] = -1;
}
sources[num_extra] = source;
sizes[num_extra] = glib_size;
glShaderSource(shader, num_extra + 1, sources, sizes);
glCompileShader(shader);
check_gl();
free(sources);
free(sizes);
g_bytes_unref(bytes);
// Check compile status
GLint success;
glGetShaderiv(shader, GL_COMPILE_STATUS, &success);
if (success == GL_FALSE) {
printf("Shader compilation failed for %s\n", resource);
glDeleteShader(shader);
return 0;
}
GLint log_length;
glGetShaderiv(shader, GL_INFO_LOG_LENGTH, &log_length);
if (log_length > 0) {
char *log = malloc(sizeof(char) * log_length);
glGetShaderInfoLog(shader, log_length - 1, &log_length, log);
printf("Shader %s log: %s\n", resource, log);
free(log);
glDeleteShader(shader);
return 0;
}
return shader;
}
GLuint
gl_util_link_program(GLuint *shaders, size_t num_shaders)
{
GLuint program = glCreateProgram();
for (size_t i = 0; i < num_shaders; ++i) {
glAttachShader(program, shaders[i]);
}
glLinkProgram(program);
check_gl();
GLint success;
glGetProgramiv(program, GL_LINK_STATUS, &success);
if (success == GL_FALSE) {
printf("Program linking failed\n");
}
GLint log_length;
glGetProgramiv(program, GL_INFO_LOG_LENGTH, &log_length);
if (log_length > 0) {
char *log = malloc(sizeof(char) * log_length);
glGetProgramInfoLog(program, log_length - 1, &log_length, log);
printf("Program log: %s\n", log);
free(log);
}
check_gl();
return program;
}
static const GLfloat quad_data[] = {
// Vertices
-1,
-1,
1,
-1,
-1,
1,
1,
1,
// Texcoords
0,
0,
1,
0,
0,
1,
1,
1,
};
GLuint
gl_util_new_quad()
{
GdkGLContext *context = gdk_gl_context_get_current();
assert(context);
if (gdk_gl_context_get_use_es(context)) {
return 0;
} else {
GLuint buffer;
glGenBuffers(1, &buffer);
glBindBuffer(GL_ARRAY_BUFFER, buffer);
glBufferData(GL_ARRAY_BUFFER,
sizeof(quad_data),
quad_data,
GL_STATIC_DRAW);
check_gl();
glBindBuffer(GL_ARRAY_BUFFER, 0);
check_gl();
return buffer;
}
}
void
gl_util_bind_quad(GLuint buffer)
{
GdkGLContext *context = gdk_gl_context_get_current();
assert(context);
if (gdk_gl_context_get_use_es(context)) {
glVertexAttribPointer(
GL_UTIL_VERTEX_ATTRIBUTE, 2, GL_FLOAT, 0, 0, quad_data);
check_gl();
glEnableVertexAttribArray(GL_UTIL_VERTEX_ATTRIBUTE);
check_gl();
glVertexAttribPointer(GL_UTIL_TEX_COORD_ATTRIBUTE,
2,
GL_FLOAT,
0,
0,
quad_data + 8);
check_gl();
glEnableVertexAttribArray(GL_UTIL_TEX_COORD_ATTRIBUTE);
check_gl();
} else {
glBindBuffer(GL_ARRAY_BUFFER, buffer);
check_gl();
glVertexAttribPointer(
GL_UTIL_VERTEX_ATTRIBUTE, 2, GL_FLOAT, GL_FALSE, 0, 0);
glEnableVertexAttribArray(GL_UTIL_VERTEX_ATTRIBUTE);
check_gl();
glVertexAttribPointer(GL_UTIL_TEX_COORD_ATTRIBUTE,
2,
GL_FLOAT,
GL_FALSE,
0,
(void *)(8 * sizeof(float)));
glEnableVertexAttribArray(GL_UTIL_TEX_COORD_ATTRIBUTE);
check_gl();
}
}
void
gl_util_draw_quad(GLuint buffer)
{
glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
check_gl();
}
megapixels-1.4.3/src/gl_util.h 0000664 0000000 0000000 00000001137 14155633267 0016272 0 ustar 00root root 0000000 0000000 #pragma once
#include <epoxy/gl.h>
#include <stddef.h>
#define GL_UTIL_VERTEX_ATTRIBUTE 0
#define GL_UTIL_TEX_COORD_ATTRIBUTE 1
#define check_gl() gl_util_check_error(__FILE__, __LINE__)
void gl_util_check_error(const char *file, int line);
GLuint gl_util_load_shader(const char *resource,
GLenum type,
const char **extra_sources,
size_t num_extra);
GLuint gl_util_link_program(GLuint *shaders, size_t num_shaders);
GLuint gl_util_new_quad();
void gl_util_bind_quad(GLuint buffer);
void gl_util_draw_quad(GLuint buffer);
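/*
 * Minimal usage sketch (illustrative only; the resource paths are examples and
 * a GdkGLContext must be current):
 *
 *     GLuint shaders[2] = {
 *             gl_util_load_shader("/org/example/blit.vert", GL_VERTEX_SHADER, NULL, 0),
 *             gl_util_load_shader("/org/example/blit.frag", GL_FRAGMENT_SHADER, NULL, 0),
 *     };
 *     GLuint program = gl_util_link_program(shaders, 2);
 *     GLuint quad = gl_util_new_quad();
 *
 *     glUseProgram(program);
 *     gl_util_bind_quad(quad);
 *     gl_util_draw_quad(quad);
 *     check_gl();
 */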
megapixels-1.4.3/src/gles2_debayer.c 0000664 0000000 0000000 00000011324 14155633267 0017334 0 ustar 00root root 0000000 0000000 #include "gles2_debayer.h"
#include "camera.h"
#include "gl_util.h"
#include <assert.h>
#include <stdlib.h>
#define VERTEX_ATTRIBUTE 0
#define TEX_COORD_ATTRIBUTE 1
struct _GLES2Debayer {
GLuint frame_buffer;
GLuint program;
GLuint uniform_transform;
GLuint uniform_pixel_size;
GLuint uniform_texture;
GLuint uniform_color_matrix;
GLuint quad;
};
GLES2Debayer *
gles2_debayer_new(MPPixelFormat format)
{
if (format != MP_PIXEL_FMT_BGGR8) {
return NULL;
}
GLuint frame_buffer;
glGenFramebuffers(1, &frame_buffer);
check_gl();
GLuint shaders[] = {
gl_util_load_shader("/org/postmarketos/Megapixels/debayer.vert",
GL_VERTEX_SHADER,
NULL,
0),
gl_util_load_shader("/org/postmarketos/Megapixels/debayer.frag",
GL_FRAGMENT_SHADER,
NULL,
0),
};
GLuint program = gl_util_link_program(shaders, 2);
glBindAttribLocation(program, VERTEX_ATTRIBUTE, "vert");
glBindAttribLocation(program, TEX_COORD_ATTRIBUTE, "tex_coord");
check_gl();
GLES2Debayer *self = malloc(sizeof(GLES2Debayer));
self->frame_buffer = frame_buffer;
self->program = program;
self->uniform_transform = glGetUniformLocation(self->program, "transform");
self->uniform_pixel_size = glGetUniformLocation(self->program, "pixel_size");
self->uniform_texture = glGetUniformLocation(self->program, "texture");
self->uniform_color_matrix =
glGetUniformLocation(self->program, "color_matrix");
check_gl();
self->quad = gl_util_new_quad();
return self;
}
void
gles2_debayer_free(GLES2Debayer *self)
{
glDeleteFramebuffers(1, &self->frame_buffer);
glDeleteProgram(self->program);
free(self);
}
void
gles2_debayer_use(GLES2Debayer *self)
{
glUseProgram(self->program);
check_gl();
gl_util_bind_quad(self->quad);
}
void
gles2_debayer_configure(GLES2Debayer *self,
const uint32_t dst_width,
const uint32_t dst_height,
const uint32_t src_width,
const uint32_t src_height,
const uint32_t rotation,
const bool mirrored,
const float *colormatrix,
const uint8_t blacklevel)
{
glViewport(0, 0, dst_width, dst_height);
check_gl();
GLfloat rotation_list[4] = { 0, -1, 0, 1 };
// Wrap so that rotation == 0 does not index past the end of rotation_list
int rotation_index = (4 - rotation / 90) % 4;
GLfloat sin_rot = rotation_list[rotation_index];
GLfloat cos_rot = rotation_list[(rotation_index + 1) % 4];
GLfloat scale_x = mirrored ? 1 : -1;
GLfloat matrix[9] = {
// clang-format off
cos_rot * scale_x, sin_rot, 0,
-sin_rot * scale_x, cos_rot, 0,
0, 0, 1,
// clang-format on
};
glUniformMatrix3fv(self->uniform_transform, 1, GL_FALSE, matrix);
check_gl();
GLfloat pixel_size_x = 1.0f / src_width;
GLfloat pixel_size_y = 1.0f / src_height;
glUniform2f(self->uniform_pixel_size, pixel_size_x, pixel_size_y);
check_gl();
if (colormatrix) {
GLfloat transposed[9];
for (int i = 0; i < 3; ++i)
for (int j = 0; j < 3; ++j)
transposed[i + j * 3] = colormatrix[j + i * 3];
glUniformMatrix3fv(
self->uniform_color_matrix, 1, GL_FALSE, transposed);
} else {
static const GLfloat identity[9] = {
// clang-format off
1, 0, 0,
0, 1, 0,
0, 0, 1,
// clang-format on
};
glUniformMatrix3fv(
self->uniform_color_matrix, 1, GL_FALSE, identity);
}
check_gl();
}
void
gles2_debayer_process(GLES2Debayer *self, GLuint dst_id, GLuint source_id)
{
glBindFramebuffer(GL_FRAMEBUFFER, self->frame_buffer);
glBindTexture(GL_TEXTURE_2D, dst_id);
glFramebufferTexture2D(
GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, dst_id, 0);
check_gl();
assert(glCheckFramebufferStatus(GL_FRAMEBUFFER) == GL_FRAMEBUFFER_COMPLETE);
glActiveTexture(GL_TEXTURE0);
glBindTexture(GL_TEXTURE_2D, source_id);
glUniform1i(self->uniform_texture, 0);
check_gl();
gl_util_draw_quad(self->quad);
}
megapixels-1.4.3/src/gles2_debayer.h 0000664 0000000 0000000 00000001507 14155633267 0017343 0 ustar 00root root 0000000 0000000 #include "camera.h"
#include "gl_util.h"
#include <stdbool.h>
#include <stdint.h>
typedef struct _GLES2Debayer GLES2Debayer;
GLES2Debayer *gles2_debayer_new(MPPixelFormat format);
void gles2_debayer_free(GLES2Debayer *self);
void gles2_debayer_use(GLES2Debayer *self);
void gles2_debayer_configure(GLES2Debayer *self,
const uint32_t dst_width,
const uint32_t dst_height,
const uint32_t src_width,
const uint32_t src_height,
const uint32_t rotation,
const bool mirrored,
const float *colormatrix,
const uint8_t blacklevel);
void gles2_debayer_process(GLES2Debayer *self, GLuint dst_id, GLuint source_id);
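/*
 * Minimal usage sketch (illustrative only; the textures and sizes are
 * placeholders, and a GL context must be current):
 *
 *     GLES2Debayer *debayer = gles2_debayer_new(MP_PIXEL_FMT_BGGR8);
 *     gles2_debayer_use(debayer);
 *     gles2_debayer_configure(debayer,
 *                             preview_width, preview_height, // destination
 *                             capture_width, capture_height, // raw source
 *                             90,                            // rotation
 *                             false,                         // mirrored
 *                             NULL,                          // identity color matrix
 *                             0);                            // black level
 *     gles2_debayer_process(debayer, dst_texture, src_texture);
 *     gles2_debayer_free(debayer);
 *
 * gles2_debayer_new() currently only accepts MP_PIXEL_FMT_BGGR8 and returns
 * NULL for other formats.
 */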
megapixels-1.4.3/src/ini.c 0000664 0000000 0000000 00000010644 14155633267 0015410 0 ustar 00root root 0000000 0000000 /* inih -- simple .INI file parser
inih is released under the New BSD license (see LICENSE.txt). Go to the project
home page for more info:
https://github.com/benhoyt/inih
*/
#if defined(_MSC_VER) && !defined(_CRT_SECURE_NO_WARNINGS)
#define _CRT_SECURE_NO_WARNINGS
#endif
#include <stdio.h>
#include <ctype.h>
#include <string.h>
#include "ini.h"
#if !INI_USE_STACK
#include <stdlib.h>
#endif
#define MAX_SECTION 50
#define MAX_NAME 50
/* Strip whitespace chars off end of given string, in place. Return s. */
static char *
rstrip(char *s)
{
char *p = s + strlen(s);
while (p > s && isspace((unsigned char)(*--p)))
*p = '\0';
return s;
}
/* Return pointer to first non-whitespace char in given string. */
static char *
lskip(const char *s)
{
while (*s && isspace((unsigned char)(*s)))
s++;
return (char *)s;
}
/* Return pointer to first char (of chars) or inline comment in given string,
or pointer to null at end of string if neither found. Inline comment must
be prefixed by a whitespace character to register as a comment. */
static char *
find_chars_or_comment(const char *s, const char *chars)
{
#if INI_ALLOW_INLINE_COMMENTS
int was_space = 0;
while (*s && (!chars || !strchr(chars, *s)) &&
!(was_space && strchr(INI_INLINE_COMMENT_PREFIXES, *s))) {
was_space = isspace((unsigned char)(*s));
s++;
}
#else
while (*s && (!chars || !strchr(chars, *s))) {
s++;
}
#endif
return (char *)s;
}
/* Version of strncpy that ensures dest (size bytes) is null-terminated. */
static char *
strncpy0(char *dest, const char *src, size_t size)
{
strncpy(dest, src, size - 1);
dest[size - 1] = '\0';
return dest;
}
/* See documentation in header file. */
int
ini_parse_stream(ini_reader reader, void *stream, ini_handler handler, void *user)
{
/* Uses a fair bit of stack (use heap instead if you need to) */
#if INI_USE_STACK
char line[INI_MAX_LINE];
#else
char *line;
#endif
char section[MAX_SECTION] = "";
char prev_name[MAX_NAME] = "";
char *start;
char *end;
char *name;
char *value;
int lineno = 0;
int error = 0;
#if !INI_USE_STACK
line = (char *)malloc(INI_MAX_LINE);
if (!line) {
return -2;
}
#endif
/* Scan through stream line by line */
while (reader(line, INI_MAX_LINE, stream) != NULL) {
lineno++;
start = line;
#if INI_ALLOW_BOM
if (lineno == 1 && (unsigned char)start[0] == 0xEF &&
(unsigned char)start[1] == 0xBB &&
(unsigned char)start[2] == 0xBF) {
start += 3;
}
#endif
start = lskip(rstrip(start));
if (*start == ';' || *start == '#') {
/* Per Python configparser, allow both ; and # comments at the
start of a line */
}
#if INI_ALLOW_MULTILINE
else if (*prev_name && *start && start > line) {
/* Non-blank line with leading whitespace, treat as continuation
of previous name's value (as per Python configparser). */
if (!handler(user, section, prev_name, start) && !error)
error = lineno;
}
#endif
else if (*start == '[') {
/* A "[section]" line */
end = find_chars_or_comment(start + 1, "]");
if (*end == ']') {
*end = '\0';
strncpy0(section, start + 1, sizeof(section));
*prev_name = '\0';
} else if (!error) {
/* No ']' found on section line */
error = lineno;
}
} else if (*start) {
/* Not a comment, must be a name[=:]value pair */
end = find_chars_or_comment(start, "=:");
if (*end == '=' || *end == ':') {
*end = '\0';
name = rstrip(start);
value = lskip(end + 1);
#if INI_ALLOW_INLINE_COMMENTS
end = find_chars_or_comment(value, NULL);
if (*end)
*end = '\0';
#endif
rstrip(value);
/* Valid name[=:]value pair found, call handler */
strncpy0(prev_name, name, sizeof(prev_name));
if (!handler(user, section, name, value) && !error)
error = lineno;
memset(value, 0, strlen(value));
} else if (!error) {
/* No '=' or ':' found on name[=:]value line */
error = lineno;
}
}
#if INI_STOP_ON_FIRST_ERROR
if (error)
break;
#endif
}
#if !INI_USE_STACK
free(line);
#endif
return error;
}
/* See documentation in header file. */
int
ini_parse_file(FILE *file, ini_handler handler, void *user)
{
return ini_parse_stream((ini_reader)fgets, file, handler, user);
}
/* See documentation in header file. */
int
ini_parse(const char *filename, ini_handler handler, void *user)
{
FILE *file;
int error;
file = fopen(filename, "r");
if (!file)
return -1;
error = ini_parse_file(file, handler, user);
fclose(file);
return error;
}
megapixels-1.4.3/src/ini.h 0000664 0000000 0000000 00000006000 14155633267 0015404 0 ustar 00root root 0000000 0000000 /* inih -- simple .INI file parser
inih is released under the New BSD license (see LICENSE.txt). Go to the project
home page for more info:
https://github.com/benhoyt/inih
*/
#ifndef __INI_H__
#define __INI_H__
/* Make this header file easier to include in C++ code */
#ifdef __cplusplus
extern "C" {
#endif
#include <stdio.h>
/* Typedef for prototype of handler function. */
typedef int (*ini_handler)(void *user, const char *section, const char *name,
const char *value);
/* Typedef for prototype of fgets-style reader function. */
typedef char *(*ini_reader)(char *str, int num, void *stream);
/* Parse given INI-style file. May have [section]s, name=value pairs
(whitespace stripped), and comments starting with ';' (semicolon). Section
is "" if name=value pair parsed before any section heading. name:value
pairs are also supported as a concession to Python's configparser.
For each name=value pair parsed, call handler function with given user
pointer as well as section, name, and value (data only valid for duration
of handler call). Handler should return nonzero on success, zero on error.
Returns 0 on success, line number of first error on parse error (doesn't
stop on first error), -1 on file open error, or -2 on memory allocation
error (only when INI_USE_STACK is zero).
*/
int ini_parse(const char *filename, ini_handler handler, void *user);
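/* Example handler (illustrative only; it collects the "model" key of the
   [device] section and needs <string.h>):

       static int handler(void *user, const char *section, const char *name,
                          const char *value)
       {
               char *model = (char *)user;
               if (strcmp(section, "device") == 0 && strcmp(name, "model") == 0)
                       strncpy(model, value, 63);
               return 1;
       }

       char model[64] = "";
       if (ini_parse("megapixels.ini", handler, model) < 0)
               printf("could not open megapixels.ini\n");
*/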
/* Same as ini_parse(), but takes a FILE* instead of filename. This doesn't
close the file when it's finished -- the caller must do that. */
int ini_parse_file(FILE *file, ini_handler handler, void *user);
/* Same as ini_parse(), but takes an ini_reader function pointer instead of
filename. Used for implementing custom or string-based I/O. */
int ini_parse_stream(ini_reader reader, void *stream, ini_handler handler,
void *user);
/* Nonzero to allow multi-line value parsing, in the style of Python's
configparser. If allowed, ini_parse() will call the handler with the same
name for each subsequent line parsed. */
#ifndef INI_ALLOW_MULTILINE
#define INI_ALLOW_MULTILINE 1
#endif
/* Nonzero to allow a UTF-8 BOM sequence (0xEF 0xBB 0xBF) at the start of
the file. See http://code.google.com/p/inih/issues/detail?id=21 */
#ifndef INI_ALLOW_BOM
#define INI_ALLOW_BOM 1
#endif
/* Nonzero to allow inline comments (with valid inline comment characters
specified by INI_INLINE_COMMENT_PREFIXES). Set to 0 to turn off and match
Python 3.2+ configparser behaviour. */
#ifndef INI_ALLOW_INLINE_COMMENTS
#define INI_ALLOW_INLINE_COMMENTS 1
#endif
#ifndef INI_INLINE_COMMENT_PREFIXES
#define INI_INLINE_COMMENT_PREFIXES ";"
#endif
/* Nonzero to use stack, zero to use heap (malloc/free). */
#ifndef INI_USE_STACK
#define INI_USE_STACK 1
#endif
/* Stop parsing on first error (default is to keep parsing). */
#ifndef INI_STOP_ON_FIRST_ERROR
#define INI_STOP_ON_FIRST_ERROR 0
#endif
/* Maximum line length for any line in INI file. */
#ifndef INI_MAX_LINE
#define INI_MAX_LINE 2000
#endif
#ifdef __cplusplus
}
#endif
#endif /* __INI_H__ */
megapixels-1.4.3/src/io_pipeline.c 0000664 0000000 0000000 00000052253 14155633267 0017127 0 ustar 00root root 0000000 0000000 #include "io_pipeline.h"
#include "camera.h"
#include "device.h"
#include "flash.h"
#include "pipeline.h"
#include "process_pipeline.h"
#include <errno.h>
#include <fcntl.h>
#include <glib.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <unistd.h>
struct media_link_info {
unsigned int source_entity_id;
unsigned int target_entity_id;
char source_fname[260];
char target_fname[260];
};
struct camera_info {
size_t device_index;
unsigned int pad_id;
char dev_fname[260];
int fd;
MPCamera *camera;
MPFlash *flash;
int gain_ctrl;
int gain_max;
bool has_auto_focus_continuous;
bool has_auto_focus_start;
// unsigned int entity_id;
// enum v4l2_buf_type type;
// char media_dev_fname[260];
// char video_dev_fname[260];
// int media_fd;
// struct mp_media_link media_links[MP_MAX_LINKS];
// int num_media_links;
// int gain_ctrl;
};
struct device_info {
const char *media_dev_name; // owned by camera config
MPDevice *device;
unsigned int interface_pad_id;
int video_fd;
};
static struct camera_info cameras[MP_MAX_CAMERAS];
static struct device_info devices[MP_MAX_CAMERAS];
static size_t num_devices = 0;
static const struct mp_camera_config *camera = NULL;
static MPCameraMode mode;
static bool just_switched_mode = false;
static int blank_frame_count = 0;
static int burst_length;
static int captures_remaining = 0;
static int preview_width;
static int preview_height;
static int device_rotation;
static bool save_dng;
struct control_state {
bool gain_is_manual;
int gain;
bool exposure_is_manual;
int exposure;
};
static struct control_state desired_controls = {};
static struct control_state current_controls = {};
static bool flash_enabled = false;
static bool want_focus = false;
static MPPipeline *pipeline;
static GSource *capture_source;
static void
setup_camera(MPDeviceList **device_list, const struct mp_camera_config *config)
{
// Find device info
size_t device_index = 0;
for (; device_index < num_devices; ++device_index) {
if (strcmp(config->media_dev_name,
devices[device_index].media_dev_name) == 0) {
break;
}
}
if (device_index == num_devices) {
device_index = num_devices;
// Initialize new device
struct device_info *info = &devices[device_index];
info->media_dev_name = config->media_dev_name;
info->device = mp_device_list_find_remove(device_list,
info->media_dev_name);
if (!info->device) {
g_printerr("Could not find /dev/media* node matching '%s'\n",
info->media_dev_name);
exit(EXIT_FAILURE);
}
const struct media_v2_entity *entity =
mp_device_find_entity_type(info->device, MEDIA_ENT_F_IO_V4L);
if (!entity) {
g_printerr("Could not find device video entity\n");
exit(EXIT_FAILURE);
}
const struct media_v2_pad *pad =
mp_device_get_pad_from_entity(info->device, entity->id);
info->interface_pad_id = pad->id;
const struct media_v2_interface *interface =
mp_device_find_entity_interface(info->device, entity->id);
char dev_name[260];
if (!mp_find_device_path(interface->devnode, dev_name, 260)) {
g_printerr("Could not find video path\n");
exit(EXIT_FAILURE);
}
info->video_fd = open(dev_name, O_RDWR);
if (info->video_fd == -1) {
g_printerr("Could not open %s: %s\n",
dev_name,
strerror(errno));
exit(EXIT_FAILURE);
}
++num_devices;
}
{
struct camera_info *info = &cameras[config->index];
struct device_info *dev_info = &devices[device_index];
info->device_index = device_index;
const struct media_v2_entity *entity =
mp_device_find_entity(dev_info->device, config->dev_name);
if (!entity) {
g_printerr("Could not find camera entity matching '%s'\n",
config->dev_name);
exit(EXIT_FAILURE);
}
const struct media_v2_pad *pad =
mp_device_get_pad_from_entity(dev_info->device, entity->id);
info->pad_id = pad->id;
// Make sure the camera starts out as disabled
mp_device_setup_link(dev_info->device,
info->pad_id,
dev_info->interface_pad_id,
false);
const struct media_v2_interface *interface =
mp_device_find_entity_interface(dev_info->device,
entity->id);
if (!mp_find_device_path(interface->devnode, info->dev_fname, 260)) {
g_printerr("Could not find camera device path\n");
exit(EXIT_FAILURE);
}
info->fd = open(info->dev_fname, O_RDWR);
if (info->fd == -1) {
g_printerr("Could not open %s: %s\n",
info->dev_fname,
strerror(errno));
exit(EXIT_FAILURE);
}
info->camera = mp_camera_new(dev_info->video_fd, info->fd);
// Start with the capture format, this works around a bug with
// the ov5640 driver where it won't allow setting the preview
// format initially.
MPCameraMode mode = config->capture_mode;
mp_camera_set_mode(info->camera, &mode);
// Trigger continuous auto focus if the sensor supports it
if (mp_camera_query_control(
info->camera, V4L2_CID_FOCUS_AUTO, NULL)) {
info->has_auto_focus_continuous = true;
mp_camera_control_set_bool_bg(
info->camera, V4L2_CID_FOCUS_AUTO, true);
}
if (mp_camera_query_control(
info->camera, V4L2_CID_AUTO_FOCUS_START, NULL)) {
info->has_auto_focus_start = true;
}
MPControl control;
if (mp_camera_query_control(info->camera, V4L2_CID_GAIN, &control)) {
info->gain_ctrl = V4L2_CID_GAIN;
info->gain_max = control.max;
} else if (mp_camera_query_control(
info->camera, V4L2_CID_ANALOGUE_GAIN, &control)) {
info->gain_ctrl = V4L2_CID_ANALOGUE_GAIN;
info->gain_max = control.max;
}
// Setup flash
if (config->flash_path[0]) {
info->flash = mp_led_flash_from_path(config->flash_path);
} else if (config->flash_display) {
info->flash = mp_create_display_flash();
} else {
info->flash = NULL;
}
}
}
static void
setup(MPPipeline *pipeline, const void *data)
{
MPDeviceList *device_list = mp_device_list_new();
for (size_t i = 0; i < MP_MAX_CAMERAS; ++i) {
const struct mp_camera_config *config = mp_get_camera_config(i);
if (!config) {
break;
}
setup_camera(&device_list, config);
}
mp_device_list_free(device_list);
}
static void
clean_cameras()
{
for (size_t i = 0; i < MP_MAX_CAMERAS; ++i) {
struct camera_info *info = &cameras[i];
if (info->camera) {
mp_camera_free(info->camera);
info->camera = NULL;
}
}
}
void
mp_io_pipeline_start()
{
mp_process_pipeline_start();
pipeline = mp_pipeline_new();
mp_pipeline_invoke(pipeline, setup, NULL, 0);
}
void
mp_io_pipeline_stop()
{
if (capture_source) {
g_source_destroy(capture_source);
}
clean_cameras();
mp_pipeline_free(pipeline);
mp_process_pipeline_stop();
}
static void
update_process_pipeline()
{
struct camera_info *info = &cameras[camera->index];
// Grab the latest control values
if (!current_controls.gain_is_manual) {
current_controls.gain =
mp_camera_control_get_int32(info->camera, info->gain_ctrl);
}
if (!current_controls.exposure_is_manual) {
current_controls.exposure =
mp_camera_control_get_int32(info->camera, V4L2_CID_EXPOSURE);
}
struct mp_process_pipeline_state pipeline_state = {
.camera = camera,
.mode = mode,
.burst_length = burst_length,
.save_dng = save_dng,
.preview_width = preview_width,
.preview_height = preview_height,
.device_rotation = device_rotation,
.gain_is_manual = current_controls.gain_is_manual,
.gain = current_controls.gain,
.gain_max = info->gain_max,
.exposure_is_manual = current_controls.exposure_is_manual,
.exposure = current_controls.exposure,
.has_auto_focus_continuous = info->has_auto_focus_continuous,
.has_auto_focus_start = info->has_auto_focus_start,
};
mp_process_pipeline_update_state(&pipeline_state);
}
static void
focus(MPPipeline *pipeline, const void *data)
{
want_focus = true;
}
void
mp_io_pipeline_focus()
{
mp_pipeline_invoke(pipeline, focus, NULL, 0);
}
static void
capture(MPPipeline *pipeline, const void *data)
{
struct camera_info *info = &cameras[camera->index];
captures_remaining = burst_length;
// Disable the autogain/exposure while taking the burst
mp_camera_control_set_int32(info->camera, V4L2_CID_AUTOGAIN, 0);
mp_camera_control_set_int32(
info->camera, V4L2_CID_EXPOSURE_AUTO, V4L2_EXPOSURE_MANUAL);
// Change camera mode for capturing
mp_process_pipeline_sync();
mp_camera_stop_capture(info->camera);
mode = camera->capture_mode;
mp_camera_set_mode(info->camera, &mode);
just_switched_mode = true;
mp_camera_start_capture(info->camera);
// Enable flash
if (info->flash && flash_enabled) {
mp_flash_enable(info->flash);
}
update_process_pipeline();
mp_process_pipeline_capture();
}
void
mp_io_pipeline_capture()
{
mp_pipeline_invoke(pipeline, capture, NULL, 0);
}
static void
release_buffer(MPPipeline *pipeline, const uint32_t *buffer_index)
{
struct camera_info *info = &cameras[camera->index];
mp_camera_release_buffer(info->camera, *buffer_index);
}
void
mp_io_pipeline_release_buffer(uint32_t buffer_index)
{
mp_pipeline_invoke(pipeline,
(MPPipelineCallback)release_buffer,
&buffer_index,
sizeof(uint32_t));
}
static pid_t focus_continuous_task = 0;
static pid_t start_focus_task = 0;
static void
start_focus(struct camera_info *info)
{
// only run 1 manual focus at once
if (!mp_camera_check_task_complete(info->camera, start_focus_task) ||
!mp_camera_check_task_complete(info->camera, focus_continuous_task))
return;
if (info->has_auto_focus_continuous) {
focus_continuous_task = mp_camera_control_set_bool_bg(
info->camera, V4L2_CID_FOCUS_AUTO, 1);
} else if (info->has_auto_focus_start) {
start_focus_task = mp_camera_control_set_bool_bg(
info->camera, V4L2_CID_AUTO_FOCUS_START, 1);
}
}
static void
update_controls()
{
// Don't update controls while capturing
if (captures_remaining > 0) {
return;
}
struct camera_info *info = &cameras[camera->index];
if (want_focus) {
start_focus(info);
want_focus = false;
}
if (current_controls.gain_is_manual != desired_controls.gain_is_manual) {
mp_camera_control_set_bool_bg(info->camera,
V4L2_CID_AUTOGAIN,
!desired_controls.gain_is_manual);
}
if (desired_controls.gain_is_manual &&
current_controls.gain != desired_controls.gain) {
mp_camera_control_set_int32_bg(
info->camera, info->gain_ctrl, desired_controls.gain);
}
if (current_controls.exposure_is_manual !=
desired_controls.exposure_is_manual) {
mp_camera_control_set_int32_bg(info->camera,
V4L2_CID_EXPOSURE_AUTO,
desired_controls.exposure_is_manual ?
V4L2_EXPOSURE_MANUAL :
V4L2_EXPOSURE_AUTO);
}
if (desired_controls.exposure_is_manual &&
current_controls.exposure != desired_controls.exposure) {
mp_camera_control_set_int32_bg(
info->camera, V4L2_CID_EXPOSURE, desired_controls.exposure);
}
current_controls = desired_controls;
}
static void
on_frame(MPBuffer buffer, void *_data)
{
// Only update controls right after a frame was captured
update_controls();
// When the mode is switched while capturing we get a couple blank frames,
// presumably from buffers made ready during the switch. Ignore these.
if (just_switched_mode) {
if (blank_frame_count < 20) {
// Only check the first 100 pixels (a 10x10 area's worth of data)
size_t test_size =
MIN(10, mode.width) * MIN(10, mode.height);
bool image_is_blank = true;
for (size_t i = 0; i < test_size; ++i) {
if (buffer.data[i] != 0) {
image_is_blank = false;
}
}
if (image_is_blank) {
++blank_frame_count;
return;
}
} else {
printf("Blank image limit reached, resulting capture may be blank\n");
}
just_switched_mode = false;
blank_frame_count = 0;
}
// Send the image off for processing
mp_process_pipeline_process_image(buffer);
if (captures_remaining > 0) {
--captures_remaining;
if (captures_remaining == 0) {
struct camera_info *info = &cameras[camera->index];
// Restore the auto exposure and gain if needed
if (!current_controls.exposure_is_manual) {
mp_camera_control_set_int32_bg(
info->camera,
V4L2_CID_EXPOSURE_AUTO,
V4L2_EXPOSURE_AUTO);
}
if (!current_controls.gain_is_manual) {
mp_camera_control_set_bool_bg(
info->camera, V4L2_CID_AUTOGAIN, true);
}
// Go back to preview mode
mp_process_pipeline_sync();
mp_camera_stop_capture(info->camera);
mode = camera->preview_mode;
mp_camera_set_mode(info->camera, &mode);
just_switched_mode = true;
mp_camera_start_capture(info->camera);
// Disable flash
if (info->flash) {
mp_flash_disable(info->flash);
}
update_process_pipeline();
}
}
}
static void
update_state(MPPipeline *pipeline, const struct mp_io_pipeline_state *state)
{
// Make sure the state isn't updated more than it needs to be by checking
// whether this state change actually changes anything.
bool has_changed = false;
if (camera != state->camera) {
has_changed = true;
if (camera) {
struct camera_info *info = &cameras[camera->index];
struct device_info *dev_info = &devices[info->device_index];
mp_process_pipeline_sync();
mp_camera_stop_capture(info->camera);
mp_device_setup_link(dev_info->device,
info->pad_id,
dev_info->interface_pad_id,
false);
}
if (capture_source) {
g_source_destroy(capture_source);
capture_source = NULL;
}
camera = state->camera;
if (camera) {
struct camera_info *info = &cameras[camera->index];
struct device_info *dev_info = &devices[info->device_index];
mp_device_setup_link(dev_info->device,
info->pad_id,
dev_info->interface_pad_id,
true);
mode = camera->preview_mode;
mp_camera_set_mode(info->camera, &mode);
mp_camera_start_capture(info->camera);
capture_source = mp_pipeline_add_capture_source(
pipeline, info->camera, on_frame, NULL);
current_controls.gain_is_manual =
mp_camera_control_get_bool(info->camera,
V4L2_CID_AUTOGAIN) == 0;
current_controls.gain = mp_camera_control_get_int32(
info->camera, info->gain_ctrl);
current_controls.exposure_is_manual =
mp_camera_control_get_int32(
info->camera, V4L2_CID_EXPOSURE_AUTO) ==
V4L2_EXPOSURE_MANUAL;
current_controls.exposure = mp_camera_control_get_int32(
info->camera, V4L2_CID_EXPOSURE);
}
}
has_changed = has_changed || burst_length != state->burst_length ||
preview_width != state->preview_width ||
preview_height != state->preview_height ||
device_rotation != state->device_rotation;
burst_length = state->burst_length;
preview_width = state->preview_width;
preview_height = state->preview_height;
device_rotation = state->device_rotation;
save_dng = state->save_dng;
if (camera) {
struct control_state previous_desired = desired_controls;
desired_controls.gain_is_manual = state->gain_is_manual;
desired_controls.gain = state->gain;
desired_controls.exposure_is_manual = state->exposure_is_manual;
desired_controls.exposure = state->exposure;
has_changed = has_changed ||
memcmp(&previous_desired,
&desired_controls,
sizeof(struct control_state)) != 0 ||
flash_enabled != state->flash_enabled;
flash_enabled = state->flash_enabled;
}
assert(has_changed);
update_process_pipeline();
}
void
mp_io_pipeline_update_state(const struct mp_io_pipeline_state *state)
{
mp_pipeline_invoke(pipeline,
(MPPipelineCallback)update_state,
state,
sizeof(struct mp_io_pipeline_state));
}
megapixels-1.4.3/src/io_pipeline.h 0000664 0000000 0000000 00000001207 14155633267 0017125 0 ustar 00root root 0000000 0000000 #pragma once
#include "camera_config.h"
struct mp_io_pipeline_state {
const struct mp_camera_config *camera;
int burst_length;
int preview_width;
int preview_height;
int device_rotation;
bool gain_is_manual;
int gain;
bool exposure_is_manual;
int exposure;
bool save_dng;
bool flash_enabled;
};
void mp_io_pipeline_start();
void mp_io_pipeline_stop();
void mp_io_pipeline_focus();
void mp_io_pipeline_capture();
void mp_io_pipeline_release_buffer(uint32_t buffer_index);
void mp_io_pipeline_update_state(const struct mp_io_pipeline_state *state);
megapixels-1.4.3/src/main.c 0000664 0000000 0000000 00000114260 14155633267 0015554 0 ustar 00root root 0000000 0000000 #include "main.h"
#include "camera_config.h"
#include "flash.h"
#include "gl_util.h"
#include "io_pipeline.h"
#include "process_pipeline.h"
#include <assert.h>
#include <gtk/gtk.h>
#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
// #define RENDERDOC
#ifdef RENDERDOC
#include <dlfcn.h>
#include <renderdoc_app.h>
RENDERDOC_API_1_1_2 *rdoc_api = NULL;
#endif
enum user_control { USER_CONTROL_ISO, USER_CONTROL_SHUTTER };
static bool camera_is_initialized = false;
static const struct mp_camera_config *camera = NULL;
static MPCameraMode mode;
static int preview_width = -1;
static int preview_height = -1;
static int device_rotation = 0;
static bool gain_is_manual = false;
static int gain;
static int gain_max;
static bool exposure_is_manual = false;
static int exposure;
static bool has_auto_focus_continuous;
static bool has_auto_focus_start;
static bool flash_enabled = false;
static bool setting_save_dng;
static MPProcessPipelineBuffer *current_preview_buffer = NULL;
static int preview_buffer_width = -1;
static int preview_buffer_height = -1;
static char last_path[260] = "";
static MPZBarScanResult *zbar_result = NULL;
static int burst_length = 3;
// Widgets
GtkWidget *preview;
GtkWidget *main_stack;
GtkWidget *open_last_stack;
GtkWidget *thumb_last;
GtkWidget *process_spinner;
GtkWidget *scanned_codes;
GtkWidget *preview_top_box;
GtkWidget *preview_bottom_box;
GtkWidget *flash_button;
GSettings *settings;
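// Linearly remaps `value` from [input_min, input_max] to [output_min,
// output_max] using 64-bit fixed-point math (factor 1e9) so no floating
// point is needed; e.g. remap(5, 0, 10, 0, 100) yields 50. Used to
// translate sensor gain into an ISO value when writing EXIF data.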
int
remap(int value, int input_min, int input_max, int output_min, int output_max)
{
const long long factor = 1000000000;
long long output_spread = output_max - output_min;
long long input_spread = input_max - input_min;
long long zero_value = value - input_min;
zero_value *= factor;
long long percentage = zero_value / input_spread;
long long zero_output = percentage * output_spread / factor;
long long result = output_min + zero_output;
return (int)result;
}
static void
update_io_pipeline()
{
struct mp_io_pipeline_state io_state = {
.camera = camera,
.burst_length = burst_length,
.preview_width = preview_width,
.preview_height = preview_height,
.device_rotation = device_rotation,
.gain_is_manual = gain_is_manual,
.gain = gain,
.exposure_is_manual = exposure_is_manual,
.exposure = exposure,
.save_dng = setting_save_dng,
.flash_enabled = flash_enabled,
};
mp_io_pipeline_update_state(&io_state);
// Make the right settings available for the camera
gtk_widget_set_visible(flash_button, camera->has_flash);
}
static bool
update_state(const struct mp_main_state *state)
{
if (!camera_is_initialized) {
camera_is_initialized = true;
}
if (camera == state->camera) {
mode = state->mode;
if (!gain_is_manual) {
gain = state->gain;
}
gain_max = state->gain_max;
if (!exposure_is_manual) {
exposure = state->exposure;
}
has_auto_focus_continuous = state->has_auto_focus_continuous;
has_auto_focus_start = state->has_auto_focus_start;
}
preview_buffer_width = state->image_width;
preview_buffer_height = state->image_height;
return false;
}
void
mp_main_update_state(const struct mp_main_state *state)
{
struct mp_main_state *state_copy = malloc(sizeof(struct mp_main_state));
*state_copy = *state;
g_main_context_invoke_full(g_main_context_default(),
G_PRIORITY_DEFAULT_IDLE,
(GSourceFunc)update_state,
state_copy,
free);
}
static bool
set_zbar_result(MPZBarScanResult *result)
{
if (zbar_result) {
for (uint8_t i = 0; i < zbar_result->size; ++i) {
free(zbar_result->codes[i].data);
}
free(zbar_result);
}
zbar_result = result;
gtk_widget_queue_draw(preview);
return false;
}
void
mp_main_set_zbar_result(MPZBarScanResult *result)
{
g_main_context_invoke_full(g_main_context_default(),
G_PRIORITY_DEFAULT_IDLE,
(GSourceFunc)set_zbar_result,
result,
NULL);
}
static bool
set_preview(MPProcessPipelineBuffer *buffer)
{
if (current_preview_buffer) {
mp_process_pipeline_buffer_unref(current_preview_buffer);
}
current_preview_buffer = buffer;
gtk_widget_queue_draw(preview);
return false;
}
void
mp_main_set_preview(MPProcessPipelineBuffer *buffer)
{
g_main_context_invoke_full(g_main_context_default(),
G_PRIORITY_DEFAULT_IDLE,
(GSourceFunc)set_preview,
buffer,
NULL);
}
struct capture_completed_args {
GdkTexture *thumb;
char *fname;
};
static bool
capture_completed(struct capture_completed_args *args)
{
strncpy(last_path, args->fname, 259);
gtk_image_set_from_paintable(GTK_IMAGE(thumb_last),
GDK_PAINTABLE(args->thumb));
gtk_spinner_stop(GTK_SPINNER(process_spinner));
gtk_stack_set_visible_child(GTK_STACK(open_last_stack), thumb_last);
g_object_unref(args->thumb);
g_free(args->fname);
return false;
}
void
mp_main_capture_completed(GdkTexture *thumb, const char *fname)
{
struct capture_completed_args *args =
malloc(sizeof(struct capture_completed_args));
args->thumb = thumb;
args->fname = g_strdup(fname);
g_main_context_invoke_full(g_main_context_default(),
G_PRIORITY_DEFAULT_IDLE,
(GSourceFunc)capture_completed,
args,
free);
}
static GLuint blit_program;
static GLuint blit_uniform_transform;
static GLuint blit_uniform_texture;
static GLuint solid_program;
static GLuint solid_uniform_color;
static GLuint quad;
static void
preview_realize(GtkGLArea *area)
{
gtk_gl_area_make_current(area);
if (gtk_gl_area_get_error(area) != NULL) {
return;
}
// Make a VAO for OpenGL
if (!gtk_gl_area_get_use_es(area)) {
GLuint vao;
glGenVertexArrays(1, &vao);
glBindVertexArray(vao);
check_gl();
}
GLuint blit_shaders[] = {
gl_util_load_shader("/org/postmarketos/Megapixels/blit.vert",
GL_VERTEX_SHADER,
NULL,
0),
gl_util_load_shader("/org/postmarketos/Megapixels/blit.frag",
GL_FRAGMENT_SHADER,
NULL,
0),
};
blit_program = gl_util_link_program(blit_shaders, 2);
glBindAttribLocation(blit_program, GL_UTIL_VERTEX_ATTRIBUTE, "vert");
glBindAttribLocation(blit_program, GL_UTIL_TEX_COORD_ATTRIBUTE, "tex_coord");
check_gl();
blit_uniform_transform = glGetUniformLocation(blit_program, "transform");
blit_uniform_texture = glGetUniformLocation(blit_program, "texture");
GLuint solid_shaders[] = {
gl_util_load_shader("/org/postmarketos/Megapixels/solid.vert",
GL_VERTEX_SHADER,
NULL,
0),
gl_util_load_shader("/org/postmarketos/Megapixels/solid.frag",
GL_FRAGMENT_SHADER,
NULL,
0),
};
solid_program = gl_util_link_program(solid_shaders, 2);
glBindAttribLocation(solid_program, GL_UTIL_VERTEX_ATTRIBUTE, "vert");
check_gl();
solid_uniform_color = glGetUniformLocation(solid_program, "color");
quad = gl_util_new_quad();
}
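// Computes where the preview image is drawn inside the window: the buffer
// is scaled to fit while keeping its aspect ratio (width and height are
// swapped for 90/270 degree rotations), centered horizontally, and
// centered vertically between the top and bottom control bars when it
// fits there, otherwise centered in the whole window.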
static void
position_preview(float *offset_x, float *offset_y, float *size_x, float *size_y)
{
int buffer_width, buffer_height;
if (device_rotation == 0 || device_rotation == 180) {
buffer_width = preview_buffer_width;
buffer_height = preview_buffer_height;
} else {
buffer_width = preview_buffer_height;
buffer_height = preview_buffer_width;
}
int scale_factor = gtk_widget_get_scale_factor(preview);
int top_height =
gtk_widget_get_allocated_height(preview_top_box) * scale_factor;
int bottom_height =
gtk_widget_get_allocated_height(preview_bottom_box) * scale_factor;
int inner_height = preview_height - top_height - bottom_height;
double scale = MIN(preview_width / (float)buffer_width,
preview_height / (float)buffer_height);
*size_x = scale * buffer_width;
*size_y = scale * buffer_height;
*offset_x = (preview_width - *size_x) / 2.0;
if (*size_y > inner_height) {
*offset_y = (preview_height - *size_y) / 2.0;
} else {
*offset_y = top_height + (inner_height - *size_y) / 2.0;
}
}
static gboolean
preview_draw(GtkGLArea *area, GdkGLContext *ctx, gpointer data)
{
if (gtk_gl_area_get_error(area) != NULL) {
return FALSE;
}
if (!camera_is_initialized) {
return FALSE;
}
#ifdef RENDERDOC
if (rdoc_api) {
rdoc_api->StartFrameCapture(NULL, NULL);
}
#endif
glClearColor(0, 0, 0, 1);
glClear(GL_COLOR_BUFFER_BIT);
float offset_x, offset_y, size_x, size_y;
position_preview(&offset_x, &offset_y, &size_x, &size_y);
glViewport(offset_x, preview_height - size_y - offset_y, size_x, size_y);
if (current_preview_buffer) {
glUseProgram(blit_program);
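// rotation_list holds sin(-angle) for 0, 90, 180 and 270 degrees;
// reading one entry back gives the matching cosine, so the matrix
// below rotates the preview texture by -device_rotation in 90
// degree steps.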
GLfloat rotation_list[4] = { 0, -1, 0, 1 };
int rotation_index = device_rotation / 90;
GLfloat sin_rot = rotation_list[rotation_index];
GLfloat cos_rot = rotation_list[(4 + rotation_index - 1) % 4];
GLfloat matrix[9] = {
// clang-format off
cos_rot, sin_rot, 0,
-sin_rot, cos_rot, 0,
0, 0, 1,
// clang-format on
};
glUniformMatrix3fv(blit_uniform_transform, 1, GL_FALSE, matrix);
check_gl();
glActiveTexture(GL_TEXTURE0);
glBindTexture(GL_TEXTURE_2D,
mp_process_pipeline_buffer_get_texture_id(
current_preview_buffer));
glUniform1i(blit_uniform_texture, 0);
check_gl();
gl_util_bind_quad(quad);
gl_util_draw_quad(quad);
}
if (zbar_result) {
GLuint buffer;
if (!gtk_gl_area_get_use_es(area)) {
glGenBuffers(1, &buffer);
glBindBuffer(GL_ARRAY_BUFFER, buffer);
check_gl();
}
glUseProgram(solid_program);
glEnable(GL_BLEND);
glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
glUniform4f(solid_uniform_color, 1, 0, 0, 0.5);
for (uint8_t i = 0; i < zbar_result->size; ++i) {
MPZBarCode *code = &zbar_result->codes[i];
GLfloat vertices[] = {
code->bounds_x[0], code->bounds_y[0],
code->bounds_x[1], code->bounds_y[1],
code->bounds_x[3], code->bounds_y[3],
code->bounds_x[2], code->bounds_y[2],
};
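// Convert the code corners from buffer pixel coordinates to
// normalized device coordinates (-1..1), flipping the Y axis
// for OpenGL.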
for (int i = 0; i < 4; ++i) {
vertices[i * 2] =
2 * vertices[i * 2] / preview_buffer_width -
1.0;
vertices[i * 2 + 1] =
1.0 - 2 * vertices[i * 2 + 1] /
preview_buffer_height;
}
if (gtk_gl_area_get_use_es(area)) {
glVertexAttribPointer(GL_UTIL_VERTEX_ATTRIBUTE,
2,
GL_FLOAT,
0,
0,
vertices);
check_gl();
glEnableVertexAttribArray(GL_UTIL_VERTEX_ATTRIBUTE);
check_gl();
} else {
glBufferData(GL_ARRAY_BUFFER,
sizeof(vertices),
vertices,
GL_STREAM_DRAW);
check_gl();
glVertexAttribPointer(GL_UTIL_VERTEX_ATTRIBUTE,
2,
GL_FLOAT,
GL_FALSE,
0,
0);
glEnableVertexAttribArray(GL_UTIL_VERTEX_ATTRIBUTE);
check_gl();
}
glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
check_gl();
}
glDisable(GL_BLEND);
glBindBuffer(GL_ARRAY_BUFFER, 0);
}
glFlush();
#ifdef RENDERDOC
if (rdoc_api) {
rdoc_api->EndFrameCapture(NULL, NULL);
}
#endif
return FALSE;
}
static gboolean
preview_resize(GtkWidget *widget, int width, int height, gpointer data)
{
if (preview_width != width || preview_height != height) {
preview_width = width;
preview_height = height;
update_io_pipeline();
}
return TRUE;
}
void
run_open_last_action(GSimpleAction *action, GVariant *param, gpointer user_data)
{
char uri[275];
g_autoptr(GError) error = NULL;
if (strlen(last_path) == 0) {
return;
}
sprintf(uri, "file://%s", last_path);
if (!g_app_info_launch_default_for_uri(uri, NULL, &error)) {
g_printerr("Could not launch image viewer for '%s': %s\n",
uri,
error->message);
}
}
void
run_open_photos_action(GSimpleAction *action, GVariant *param, gpointer user_data)
{
char uri[270];
g_autoptr(GError) error = NULL;
sprintf(uri, "file://%s", g_get_user_special_dir(G_USER_DIRECTORY_PICTURES));
if (!g_app_info_launch_default_for_uri(uri, NULL, &error)) {
g_printerr("Could not launch image viewer: %s\n", error->message);
}
}
void
run_capture_action(GSimpleAction *action, GVariant *param, gpointer user_data)
{
gtk_spinner_start(GTK_SPINNER(process_spinner));
gtk_stack_set_visible_child(GTK_STACK(open_last_stack), process_spinner);
mp_io_pipeline_capture();
}
void
run_about_action(GSimpleAction *action, GVariant *param, GApplication *app)
{
gtk_show_about_dialog(NULL,
"program-name",
"Megapixels",
"title",
"Megapixels",
"logo-icon-name",
"org.postmarketos.Megapixels",
"comments",
"The postmarketOS camera application",
"website",
"https://sr.ht/~martijnbraam/megapixels",
"version",
VERSION,
"license-type",
GTK_LICENSE_GPL_3_0_ONLY,
NULL);
}
void
run_quit_action(GSimpleAction *action, GVariant *param, GApplication *app)
{
g_application_quit(app);
}
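// Hit test for a scanned code: returns true when (x, y) lies inside the
// axis-aligned bounding box spanned by the four corner points, i.e. the
// point is not entirely to one side of all corners at once.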
static bool
check_point_inside_bounds(int x, int y, int *bounds_x, int *bounds_y)
{
bool right = false, left = false, top = false, bottom = false;
for (int i = 0; i < 4; ++i) {
if (x <= bounds_x[i])
left = true;
if (x >= bounds_x[i])
right = true;
if (y <= bounds_y[i])
top = true;
if (y >= bounds_y[i])
bottom = true;
}
return right && left && top && bottom;
}
static void
on_zbar_dialog_response(GtkDialog *dialog, int response, char *data)
{
g_autoptr(GError) error = NULL;
switch (response) {
case GTK_RESPONSE_YES:
if (!g_app_info_launch_default_for_uri(data, NULL, &error)) {
g_printerr("Could not launch application: %s\n",
error->message);
}
case GTK_RESPONSE_ACCEPT: {
GdkDisplay *display = gtk_widget_get_display(GTK_WIDGET(dialog));
gdk_clipboard_set_text(gdk_display_get_clipboard(display), data);
}
case GTK_RESPONSE_CANCEL:
break;
default:
g_printerr("Wrong dialog response: %d\n", response);
}
g_free(data);
gtk_window_destroy(GTK_WINDOW(dialog));
}
static void
on_zbar_code_tapped(GtkWidget *widget, const MPZBarCode *code)
{
GtkWidget *dialog;
GtkDialogFlags flags = GTK_DIALOG_MODAL | GTK_DIALOG_DESTROY_WITH_PARENT;
bool data_is_url =
g_uri_is_valid(code->data, G_URI_FLAGS_PARSE_RELAXED, NULL);
char *data = strdup(code->data);
if (data_is_url) {
dialog = gtk_message_dialog_new(
GTK_WINDOW(gtk_widget_get_root(widget)),
flags,
GTK_MESSAGE_QUESTION,
GTK_BUTTONS_NONE,
"Found a URL '%s' encoded in a %s.",
code->data,
code->type);
gtk_dialog_add_buttons(
GTK_DIALOG(dialog), "_Open URL", GTK_RESPONSE_YES, NULL);
} else {
dialog = gtk_message_dialog_new(
GTK_WINDOW(gtk_widget_get_root(widget)),
flags,
GTK_MESSAGE_QUESTION,
GTK_BUTTONS_NONE,
"Found data encoded in a %s.",
code->type);
gtk_message_dialog_format_secondary_markup(
GTK_MESSAGE_DIALOG(dialog), "%s", code->data);
}
gtk_dialog_add_buttons(GTK_DIALOG(dialog),
"_Copy",
GTK_RESPONSE_ACCEPT,
"_Cancel",
GTK_RESPONSE_CANCEL,
NULL);
g_signal_connect(
dialog, "response", G_CALLBACK(on_zbar_dialog_response), data);
gtk_widget_show(GTK_WIDGET(dialog));
}
static void
preview_pressed(GtkGestureClick *gesture, int n_press, double x, double y)
{
GtkWidget *widget =
gtk_event_controller_get_widget(GTK_EVENT_CONTROLLER(gesture));
int scale_factor = gtk_widget_get_scale_factor(widget);
// Tapped zbar result
if (zbar_result) {
// Transform the event coordinates to the image
float offset_x, offset_y, size_x, size_y;
position_preview(&offset_x, &offset_y, &size_x, &size_y);
int zbar_x = (x - offset_x) * scale_factor / size_x *
preview_buffer_width;
int zbar_y = (y - offset_y) * scale_factor / size_y *
preview_buffer_height;
for (uint8_t i = 0; i < zbar_result->size; ++i) {
MPZBarCode *code = &zbar_result->codes[i];
if (check_point_inside_bounds(zbar_x,
zbar_y,
code->bounds_x,
code->bounds_y)) {
on_zbar_code_tapped(widget, code);
return;
}
}
}
// Tapped preview image itself, try focussing
if (has_auto_focus_start) {
mp_io_pipeline_focus();
}
}
static void
run_camera_switch_action(GSimpleAction *action, GVariant *param, gpointer user_data)
{
size_t next_index = camera->index + 1;
const struct mp_camera_config *next_camera =
mp_get_camera_config(next_index);
if (!next_camera) {
next_index = 0;
next_camera = mp_get_camera_config(next_index);
}
camera = next_camera;
update_io_pipeline();
}
static void
run_open_settings_action(GSimpleAction *action, GVariant *param, gpointer user_data)
{
gtk_stack_set_visible_child_name(GTK_STACK(main_stack), "settings");
}
static void
run_close_settings_action(GSimpleAction *action, GVariant *param, gpointer user_data)
{
gtk_stack_set_visible_child_name(GTK_STACK(main_stack), "main");
// Update settings
bool save_dng = g_settings_get_boolean(settings, "save-raw");
if (save_dng != setting_save_dng) {
setting_save_dng = save_dng;
update_io_pipeline();
}
}
static void
on_controls_scale_changed(GtkAdjustment *adjustment, void (*set_fn)(double))
{
set_fn(gtk_adjustment_get_value(adjustment));
}
static void
update_value(GtkAdjustment *adjustment, GtkLabel *label)
{
char buf[12];
snprintf(buf, 12, "%.0f", gtk_adjustment_get_value(adjustment));
gtk_label_set_label(label, buf);
}
static void
on_auto_controls_toggled(GtkToggleButton *button, void (*set_auto_fn)(bool))
{
set_auto_fn(gtk_toggle_button_get_active(button));
}
static void
update_scale(GtkToggleButton *button, GtkScale *scale)
{
gtk_widget_set_sensitive(GTK_WIDGET(scale),
!gtk_toggle_button_get_active(button));
}
static void
open_controls(GtkWidget *parent,
const char *title_name,
double min_value,
double max_value,
double current,
bool auto_enabled,
void (*set_fn)(double),
void (*set_auto_fn)(bool))
{
GtkBuilder *builder = gtk_builder_new_from_resource(
"/org/postmarketos/Megapixels/controls-popover.ui");
GtkPopover *popover =
GTK_POPOVER(gtk_builder_get_object(builder, "controls"));
GtkScale *scale = GTK_SCALE(gtk_builder_get_object(builder, "scale"));
GtkLabel *title = GTK_LABEL(gtk_builder_get_object(builder, "title"));
GtkLabel *value_label =
GTK_LABEL(gtk_builder_get_object(builder, "value-label"));
GtkToggleButton *auto_button =
GTK_TOGGLE_BUTTON(gtk_builder_get_object(builder, "auto-button"));
gtk_label_set_label(title, title_name);
GtkAdjustment *adjustment = gtk_range_get_adjustment(GTK_RANGE(scale));
gtk_adjustment_set_lower(adjustment, min_value);
gtk_adjustment_set_upper(adjustment, max_value);
gtk_adjustment_set_value(adjustment, current);
update_value(adjustment, value_label);
gtk_toggle_button_set_active(auto_button, auto_enabled);
update_scale(auto_button, scale);
g_signal_connect(adjustment,
"value-changed",
G_CALLBACK(on_controls_scale_changed),
set_fn);
g_signal_connect(
adjustment, "value-changed", G_CALLBACK(update_value), value_label);
g_signal_connect(auto_button,
"toggled",
G_CALLBACK(on_auto_controls_toggled),
set_auto_fn);
g_signal_connect(auto_button, "toggled", G_CALLBACK(update_scale), scale);
gtk_widget_set_parent(GTK_WIDGET(popover), parent);
gtk_popover_popup(popover);
// g_object_unref(popover);
}
static void
set_gain(double value)
{
if (gain != (int)value) {
gain = value;
update_io_pipeline();
}
}
static void
set_gain_auto(bool is_auto)
{
if (gain_is_manual != !is_auto) {
gain_is_manual = !is_auto;
update_io_pipeline();
}
}
static void
open_iso_controls(GtkWidget *button, gpointer user_data)
{
open_controls(button,
"ISO",
0,
gain_max,
gain,
!gain_is_manual,
set_gain,
set_gain_auto);
}
static void
set_shutter(double value)
{
int new_exposure = (int)(value / 360.0 * camera->capture_mode.height);
if (new_exposure != exposure) {
exposure = new_exposure;
update_io_pipeline();
}
}
static void
set_shutter_auto(bool is_auto)
{
if (exposure_is_manual != !is_auto) {
exposure_is_manual = !is_auto;
update_io_pipeline();
}
}
static void
open_shutter_controls(GtkWidget *button, gpointer user_data)
{
open_controls(button,
"Shutter",
1.0,
360.0,
exposure,
!exposure_is_manual,
set_shutter,
set_shutter_auto);
}
static void
flash_button_clicked(GtkWidget *button, gpointer user_data)
{
flash_enabled = !flash_enabled;
update_io_pipeline();
const char *icon_name =
flash_enabled ? "flash-enabled-symbolic" : "flash-disabled-symbolic";
gtk_button_set_icon_name(GTK_BUTTON(button), icon_name);
}
static void
on_realize(GtkWidget *window, gpointer *data)
{
GtkNative *native = gtk_widget_get_native(window);
mp_process_pipeline_init_gl(gtk_native_get_surface(native));
camera = mp_get_camera_config(0);
update_io_pipeline();
}
static GSimpleAction *
create_simple_action(GtkApplication *app, const char *name, GCallback callback)
{
GSimpleAction *action = g_simple_action_new(name, NULL);
g_signal_connect(action, "activate", callback, app);
g_action_map_add_action(G_ACTION_MAP(app), G_ACTION(action));
return action;
}
static void
update_ui_rotation()
{
if (device_rotation == 0 || device_rotation == 180) {
// Portrait
gtk_widget_set_halign(preview_top_box, GTK_ALIGN_FILL);
gtk_orientable_set_orientation(GTK_ORIENTABLE(preview_top_box),
GTK_ORIENTATION_VERTICAL);
gtk_widget_set_halign(preview_bottom_box, GTK_ALIGN_FILL);
gtk_orientable_set_orientation(GTK_ORIENTABLE(preview_bottom_box),
GTK_ORIENTATION_HORIZONTAL);
if (device_rotation == 0) {
gtk_widget_set_valign(preview_top_box, GTK_ALIGN_START);
gtk_widget_set_valign(preview_bottom_box, GTK_ALIGN_END);
} else {
gtk_widget_set_valign(preview_top_box, GTK_ALIGN_END);
gtk_widget_set_valign(preview_bottom_box, GTK_ALIGN_START);
}
} else {
// Landscape
gtk_widget_set_valign(preview_top_box, GTK_ALIGN_FILL);
gtk_orientable_set_orientation(GTK_ORIENTABLE(preview_top_box),
GTK_ORIENTATION_HORIZONTAL);
gtk_widget_set_valign(preview_bottom_box, GTK_ALIGN_FILL);
gtk_orientable_set_orientation(GTK_ORIENTABLE(preview_bottom_box),
GTK_ORIENTATION_VERTICAL);
if (device_rotation == 90) {
gtk_widget_set_halign(preview_top_box, GTK_ALIGN_END);
gtk_widget_set_halign(preview_bottom_box, GTK_ALIGN_START);
} else {
gtk_widget_set_halign(preview_top_box, GTK_ALIGN_START);
gtk_widget_set_halign(preview_bottom_box, GTK_ALIGN_END);
}
}
}
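// Handles the reply of the DisplayConfig GetResources call issued below.
// The eighth child (index 7) of the first CRTC entry is treated as the
// current display transform, where values 0-3 are assumed to map to
// rotations of 0, 90, 180 and 270 degrees.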
static void
display_config_received(GDBusConnection *conn, GAsyncResult *res, gpointer user_data)
{
g_autoptr(GError) error = NULL;
g_autoptr(GVariant) result =
g_dbus_connection_call_finish(conn, res, &error);
if (!result) {
printf("Failed to get display configuration: %s\n", error->message);
return;
}
g_autoptr(GVariant) configs = g_variant_get_child_value(result, 1);
if (g_variant_n_children(configs) == 0) {
return;
}
g_autoptr(GVariant) config = g_variant_get_child_value(configs, 0);
g_autoptr(GVariant) rot_config = g_variant_get_child_value(config, 7);
uint32_t rotation_index = g_variant_get_uint32(rot_config);
assert(rotation_index < 4);
int new_rotation = rotation_index * 90;
if (new_rotation != device_rotation) {
device_rotation = new_rotation;
update_io_pipeline();
update_ui_rotation();
}
}
static void
update_screen_rotation(GDBusConnection *conn)
{
g_dbus_connection_call(conn,
"org.gnome.Mutter.DisplayConfig",
"/org/gnome/Mutter/DisplayConfig",
"org.gnome.Mutter.DisplayConfig",
"GetResources",
NULL,
NULL,
G_DBUS_CALL_FLAGS_NO_AUTO_START,
-1,
NULL,
(GAsyncReadyCallback)display_config_received,
NULL);
}
static void
on_screen_rotate(GDBusConnection *conn,
const gchar *sender_name,
const gchar *object_path,
const gchar *interface_name,
const gchar *signal_name,
GVariant *parameters,
gpointer user_data)
{
update_screen_rotation(conn);
}
static void
activate(GtkApplication *app, gpointer data)
{
g_object_set(gtk_settings_get_default(),
"gtk-application-prefer-dark-theme",
TRUE,
NULL);
GdkDisplay *display = gdk_display_get_default();
GtkIconTheme *icon_theme = gtk_icon_theme_get_for_display(display);
gtk_icon_theme_add_resource_path(icon_theme, "/org/postmarketos/Megapixels");
GtkCssProvider *provider = gtk_css_provider_new();
gtk_css_provider_load_from_resource(
provider, "/org/postmarketos/Megapixels/camera.css");
gtk_style_context_add_provider_for_display(
display,
GTK_STYLE_PROVIDER(provider),
GTK_STYLE_PROVIDER_PRIORITY_APPLICATION);
GtkBuilder *builder = gtk_builder_new_from_resource(
"/org/postmarketos/Megapixels/camera.ui");
GtkWidget *window = GTK_WIDGET(gtk_builder_get_object(builder, "window"));
GtkWidget *iso_button =
GTK_WIDGET(gtk_builder_get_object(builder, "iso-controls-button"));
GtkWidget *shutter_button = GTK_WIDGET(
gtk_builder_get_object(builder, "shutter-controls-button"));
flash_button =
GTK_WIDGET(gtk_builder_get_object(builder, "flash-controls-button"));
GtkWidget *setting_dng_button =
GTK_WIDGET(gtk_builder_get_object(builder, "setting-raw"));
preview = GTK_WIDGET(gtk_builder_get_object(builder, "preview"));
main_stack = GTK_WIDGET(gtk_builder_get_object(builder, "main_stack"));
open_last_stack =
GTK_WIDGET(gtk_builder_get_object(builder, "open_last_stack"));
thumb_last = GTK_WIDGET(gtk_builder_get_object(builder, "thumb_last"));
process_spinner =
GTK_WIDGET(gtk_builder_get_object(builder, "process_spinner"));
scanned_codes = GTK_WIDGET(gtk_builder_get_object(builder, "scanned-codes"));
preview_top_box = GTK_WIDGET(gtk_builder_get_object(builder, "top-box"));
preview_bottom_box =
GTK_WIDGET(gtk_builder_get_object(builder, "bottom-box"));
g_signal_connect(window, "realize", G_CALLBACK(on_realize), NULL);
g_signal_connect(preview, "realize", G_CALLBACK(preview_realize), NULL);
g_signal_connect(preview, "render", G_CALLBACK(preview_draw), NULL);
g_signal_connect(preview, "resize", G_CALLBACK(preview_resize), NULL);
GtkGesture *click = gtk_gesture_click_new();
g_signal_connect(click, "pressed", G_CALLBACK(preview_pressed), NULL);
gtk_widget_add_controller(preview, GTK_EVENT_CONTROLLER(click));
g_signal_connect(iso_button, "clicked", G_CALLBACK(open_iso_controls), NULL);
g_signal_connect(
shutter_button, "clicked", G_CALLBACK(open_shutter_controls), NULL);
g_signal_connect(
flash_button, "clicked", G_CALLBACK(flash_button_clicked), NULL);
// Setup actions
create_simple_action(app, "capture", G_CALLBACK(run_capture_action));
create_simple_action(
app, "switch-camera", G_CALLBACK(run_camera_switch_action));
create_simple_action(
app, "open-settings", G_CALLBACK(run_open_settings_action));
create_simple_action(
app, "close-settings", G_CALLBACK(run_close_settings_action));
create_simple_action(app, "open-last", G_CALLBACK(run_open_last_action));
create_simple_action(app, "open-photos", G_CALLBACK(run_open_photos_action));
create_simple_action(app, "about", G_CALLBACK(run_about_action));
create_simple_action(app, "quit", G_CALLBACK(run_quit_action));
// Setup shortcuts
const char *capture_accels[] = { "space", NULL };
gtk_application_set_accels_for_action(app, "app.capture", capture_accels);
const char *quit_accels[] = { "q", "w", NULL };
gtk_application_set_accels_for_action(app, "app.quit", quit_accels);
// Setup settings
settings = g_settings_new("org.postmarketos.Megapixels");
g_settings_bind(settings,
"save-raw",
setting_dng_button,
"active",
G_SETTINGS_BIND_DEFAULT);
setting_save_dng = g_settings_get_boolean(settings, "save-raw");
// Listen for phosh rotation
GDBusConnection *conn =
g_application_get_dbus_connection(G_APPLICATION(app));
g_dbus_connection_signal_subscribe(conn,
NULL,
"org.gnome.Mutter.DisplayConfig",
"MonitorsChanged",
"/org/gnome/Mutter/DisplayConfig",
NULL,
G_DBUS_SIGNAL_FLAGS_NONE,
&on_screen_rotate,
NULL,
NULL);
update_screen_rotation(conn);
// Initialize display flash
mp_flash_gtk_init(conn);
mp_io_pipeline_start();
gtk_application_add_window(app, GTK_WINDOW(window));
gtk_widget_show(window);
}
static void
shutdown(GApplication *app, gpointer data)
{
// Only do cleanup in development, let the OS clean up otherwise
#ifdef DEBUG
mp_io_pipeline_stop();
mp_flash_gtk_clean();
#endif
}
int
main(int argc, char *argv[])
{
#ifdef RENDERDOC
{
void *mod = dlopen("librenderdoc.so", RTLD_NOW | RTLD_NOLOAD);
if (mod) {
pRENDERDOC_GetAPI RENDERDOC_GetAPI =
(pRENDERDOC_GetAPI)dlsym(mod, "RENDERDOC_GetAPI");
int ret = RENDERDOC_GetAPI(eRENDERDOC_API_Version_1_1_2,
(void **)&rdoc_api);
assert(ret == 1);
} else {
printf("Renderdoc not found\n");
}
}
#endif
if (!mp_load_config())
return 1;
setenv("LC_NUMERIC", "C", 1);
GtkApplication *app = gtk_application_new("org.postmarketos.Megapixels", 0);
g_signal_connect(app, "activate", G_CALLBACK(activate), NULL);
g_signal_connect(app, "shutdown", G_CALLBACK(shutdown), NULL);
g_application_run(G_APPLICATION(app), argc, argv);
return 0;
}
megapixels-1.4.3/src/main.h 0000664 0000000 0000000 00000001451 14155633267 0015556 0 ustar 00root root 0000000 0000000 #pragma once
#include "camera_config.h"
#include "gtk/gtk.h"
#include "process_pipeline.h"
#include "zbar_pipeline.h"
struct mp_main_state {
const struct mp_camera_config *camera;
MPCameraMode mode;
int image_width;
int image_height;
bool gain_is_manual;
int gain;
int gain_max;
bool exposure_is_manual;
int exposure;
bool has_auto_focus_continuous;
bool has_auto_focus_start;
};
void mp_main_update_state(const struct mp_main_state *state);
void mp_main_set_preview(MPProcessPipelineBuffer *buffer);
void mp_main_capture_completed(GdkTexture *thumb, const char *fname);
void mp_main_set_zbar_result(MPZBarScanResult *result);
int remap(int value, int input_min, int input_max, int output_min, int output_max);
megapixels-1.4.3/src/matrix.c 0000664 0000000 0000000 00000001254 14155633267 0016132 0 ustar 00root root 0000000 0000000 #include <stdio.h>
void
print_matrix(float m[9])
{
printf(" [%.2f %.2f %.2f] \n", m[0], m[1], m[2]);
printf(" [%.2f %.2f %.2f] \n", m[3], m[4], m[5]);
printf(" [%.2f %.2f %.2f] \n\n", m[6], m[7], m[8]);
}
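// 3x3 matrix product out = a * b, with all matrices stored row-major as
// flat arrays of 9 floats; multiplying by the identity
// {1,0,0, 0,1,0, 0,0,1} leaves the other operand unchanged. `out` must
// not alias `a` or `b`, since it is zeroed before accumulation.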
void
multiply_matrices(float a[9], float b[9], float out[9])
{
// zero out target matrix
for (int i = 0; i < 9; i++) {
out[i] = 0;
}
for (int i = 0; i < 3; i++) {
for (int j = 0; j < 3; j++) {
for (int k = 0; k < 3; k++) {
out[i * 3 + j] += a[i * 3 + k] * b[k * 3 + j];
}
}
}
}
megapixels-1.4.3/src/matrix.h 0000664 0000000 0000000 00000000076 14155633267 0016140 0 ustar 00root root 0000000 0000000 void multiply_matrices(float a[9], float b[9], float out[9]);
megapixels-1.4.3/src/pipeline.c 0000664 0000000 0000000 00000007443 14155633267 0016441 0 ustar 00root root 0000000 0000000 #include "pipeline.h"
#include <assert.h>
#include <glib-unix.h>
#include <pthread.h>
#include <stdlib.h>
#include <string.h>
struct _MPPipeline {
GMainContext *main_context;
GMainLoop *main_loop;
pthread_t thread;
};
static void *
thread_main_loop(void *arg)
{
MPPipeline *pipeline = arg;
g_main_loop_run(pipeline->main_loop);
return NULL;
}
MPPipeline *
mp_pipeline_new()
{
MPPipeline *pipeline = malloc(sizeof(MPPipeline));
pipeline->main_context = g_main_context_new();
pipeline->main_loop = g_main_loop_new(pipeline->main_context, false);
int res =
pthread_create(&pipeline->thread, NULL, thread_main_loop, pipeline);
assert(res == 0);
return pipeline;
}
struct invoke_args {
MPPipeline *pipeline;
MPPipelineCallback callback;
};
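// The caller's payload is copied into the same allocation directly after
// this header (see mp_pipeline_invoke), so invoke_impl can hand the
// callback a pointer to it as `args + 1`.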
static bool
invoke_impl(struct invoke_args *args)
{
args->callback(args->pipeline, args + 1);
return false;
}
void
mp_pipeline_invoke(MPPipeline *pipeline,
MPPipelineCallback callback,
const void *data,
size_t size)
{
if (pthread_self() != pipeline->thread) {
struct invoke_args *args = malloc(sizeof(struct invoke_args) + size);
args->pipeline = pipeline;
args->callback = callback;
if (size > 0) {
memcpy(args + 1, data, size);
}
g_main_context_invoke_full(pipeline->main_context,
G_PRIORITY_DEFAULT,
(GSourceFunc)invoke_impl,
args,
free);
} else {
callback(pipeline, data);
}
}
static bool
unlock_mutex(GMutex *mutex)
{
g_mutex_unlock(mutex);
return false;
}
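// Blocks until the pipeline thread has drained all pending work: the
// mutex is locked here, an unlock callback is queued at low priority so
// it runs after everything already scheduled, and the second lock below
// can only succeed once the pipeline thread has executed that callback.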
void
mp_pipeline_sync(MPPipeline *pipeline)
{
GMutex mutex;
g_mutex_init(&mutex);
g_mutex_lock(&mutex);
g_main_context_invoke_full(pipeline->main_context,
G_PRIORITY_LOW,
(GSourceFunc)unlock_mutex,
&mutex,
NULL);
g_mutex_lock(&mutex);
g_mutex_unlock(&mutex);
g_mutex_clear(&mutex);
}
void
mp_pipeline_free(MPPipeline *pipeline)
{
g_main_loop_quit(pipeline->main_loop);
// Force the main thread loop to wake up, otherwise we might not exit
g_main_context_wakeup(pipeline->main_context);
void *r;
pthread_join(pipeline->thread, &r);
free(pipeline);
}
struct capture_source_args {
MPCamera *camera;
void (*callback)(MPBuffer, void *);
void *user_data;
};
static bool
on_capture(int fd, GIOCondition condition, struct capture_source_args *args)
{
MPBuffer buffer;
if (mp_camera_capture_buffer(args->camera, &buffer)) {
args->callback(buffer, args->user_data);
}
return true;
}
// Not thread safe
GSource *
mp_pipeline_add_capture_source(MPPipeline *pipeline,
MPCamera *camera,
void (*callback)(MPBuffer, void *),
void *user_data)
{
int video_fd = mp_camera_get_video_fd(camera);
GSource *video_source = g_unix_fd_source_new(video_fd, G_IO_IN);
struct capture_source_args *args =
malloc(sizeof(struct capture_source_args));
args->camera = camera;
args->callback = callback;
args->user_data = user_data;
g_source_set_callback(video_source, (GSourceFunc)on_capture, args, free);
g_source_attach(video_source, pipeline->main_context);
return video_source;
}
megapixels-1.4.3/src/pipeline.h 0000664 0000000 0000000 00000001414 14155633267 0016436 0 ustar 00root root 0000000 0000000 #pragma once
#include "camera.h"
#include "device.h"
#include <glib.h>
typedef struct _MPPipeline MPPipeline;
typedef void (*MPPipelineCallback)(MPPipeline *, const void *);
MPPipeline *mp_pipeline_new();
void mp_pipeline_invoke(MPPipeline *pipeline,
MPPipelineCallback callback,
const void *data,
size_t size);
// Wait until all pending tasks have completed
void mp_pipeline_sync(MPPipeline *pipeline);
void mp_pipeline_free(MPPipeline *pipeline);
GSource *mp_pipeline_add_capture_source(MPPipeline *pipeline,
MPCamera *camera,
void (*callback)(MPBuffer, void *),
void *user_data);
megapixels-1.4.3/src/process_pipeline.c 0000664 0000000 0000000 00000064134 14155633267 0020177 0 ustar 00root root 0000000 0000000 #include "process_pipeline.h"
#include "config.h"
#include "gles2_debayer.h"
#include "io_pipeline.h"
#include "main.h"
#include "pipeline.h"
#include "zbar_pipeline.h"
#include <assert.h>
#include <gtk/gtk.h>
#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <tiffio.h>
#include <time.h>
#include <unistd.h>
#include "gl_util.h"
#define TIFFTAG_FORWARDMATRIX1 50964
static const float colormatrix_srgb[] = { 3.2409, -1.5373, -0.4986, -0.9692, 1.8759,
0.0415, 0.0556, -0.2039, 1.0569 };
static MPPipeline *pipeline;
static char burst_dir[23];
static char processing_script[512];
static volatile bool is_capturing = false;
static volatile int frames_processed = 0;
static volatile int frames_received = 0;
static const struct mp_camera_config *camera;
static int camera_rotation;
static MPCameraMode mode;
static int burst_length;
static int captures_remaining = 0;
static int preview_width;
static int preview_height;
static int device_rotation;
static int output_buffer_width = -1;
static int output_buffer_height = -1;
// static bool gain_is_manual;
static int gain;
static int gain_max;
static bool exposure_is_manual;
static int exposure;
static bool save_dng;
static char capture_fname[255];
static void
register_custom_tiff_tags(TIFF *tif)
{
static const TIFFFieldInfo custom_fields[] = {
{ TIFFTAG_FORWARDMATRIX1,
-1,
-1,
TIFF_SRATIONAL,
FIELD_CUSTOM,
1,
1,
"ForwardMatrix1" },
};
// Add missing dng fields
TIFFMergeFieldInfo(tif,
custom_fields,
sizeof(custom_fields) / sizeof(custom_fields[0]));
}
static bool
find_processor(char *script)
{
char filename[] = "postprocess.sh";
// Check postprocess.sh in the current working directory
sprintf(script, "./data/%s", filename);
if (access(script, F_OK) != -1) {
sprintf(script, "./data/%s", filename);
printf("Found postprocessor script at %s\n", script);
return true;
}
// Check for a script in XDG_CONFIG_HOME
sprintf(script, "%s/megapixels/%s", g_get_user_config_dir(), filename);
if (access(script, F_OK) != -1) {
printf("Found postprocessor script at %s\n", script);
return true;
}
// Check user overridden /etc/megapixels/postprocessor.sh
sprintf(script, "%s/megapixels/%s", SYSCONFDIR, filename);
if (access(script, F_OK) != -1) {
printf("Found postprocessor script at %s\n", script);
return true;
}
// Check packaged /usr/share/megapixels/postprocessor.sh
sprintf(script, "%s/megapixels/%s", DATADIR, filename);
if (access(script, F_OK) != -1) {
printf("Found postprocessor script at %s\n", script);
return true;
}
return false;
}
static void
setup(MPPipeline *pipeline, const void *data)
{
TIFFSetTagExtender(register_custom_tiff_tags);
if (!find_processor(processing_script)) {
g_printerr("Could not find any post-process script\n");
exit(1);
}
}
void
mp_process_pipeline_start()
{
pipeline = mp_pipeline_new();
mp_pipeline_invoke(pipeline, setup, NULL, 0);
mp_zbar_pipeline_start();
}
void
mp_process_pipeline_stop()
{
mp_pipeline_free(pipeline);
mp_zbar_pipeline_stop();
}
void
mp_process_pipeline_sync()
{
mp_pipeline_sync(pipeline);
}
#define NUM_BUFFERS 4
struct _MPProcessPipelineBuffer {
GLuint texture_id;
_Atomic(int) refcount;
};
static MPProcessPipelineBuffer output_buffers[NUM_BUFFERS];
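// The refcount only marks a buffer as still in use by the preview; the
// textures themselves live for the whole session, and a buffer whose
// refcount has dropped back to zero is simply reused for a later frame
// (see process_image_for_preview).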
void
mp_process_pipeline_buffer_ref(MPProcessPipelineBuffer *buf)
{
++buf->refcount;
}
void
mp_process_pipeline_buffer_unref(MPProcessPipelineBuffer *buf)
{
--buf->refcount;
}
uint32_t
mp_process_pipeline_buffer_get_texture_id(MPProcessPipelineBuffer *buf)
{
return buf->texture_id;
}
static GLES2Debayer *gles2_debayer = NULL;
static GdkGLContext *context;
// #define RENDERDOC
#ifdef RENDERDOC
#include
extern RENDERDOC_API_1_1_2 *rdoc_api;
#endif
static void
init_gl(MPPipeline *pipeline, GdkSurface **surface)
{
GError *error = NULL;
context = gdk_surface_create_gl_context(*surface, &error);
if (context == NULL) {
printf("Failed to initialize OpenGL context: %s\n", error->message);
g_clear_error(&error);
return;
}
gdk_gl_context_set_use_es(context, true);
gdk_gl_context_set_required_version(context, 2, 0);
gdk_gl_context_set_forward_compatible(context, false);
#ifdef DEBUG
gdk_gl_context_set_debug_enabled(context, true);
#else
gdk_gl_context_set_debug_enabled(context, false);
#endif
gdk_gl_context_realize(context, &error);
if (error != NULL) {
printf("Failed to create OpenGL context: %s\n", error->message);
g_clear_object(&context);
g_clear_error(&error);
return;
}
gdk_gl_context_make_current(context);
check_gl();
// Make a VAO for OpenGL
if (!gdk_gl_context_get_use_es(context)) {
GLuint vao;
glGenVertexArrays(1, &vao);
glBindVertexArray(vao);
check_gl();
}
gles2_debayer = gles2_debayer_new(MP_PIXEL_FMT_BGGR8);
check_gl();
glPixelStorei(GL_UNPACK_ALIGNMENT, 1);
check_gl();
gles2_debayer_use(gles2_debayer);
for (size_t i = 0; i < NUM_BUFFERS; ++i) {
glGenTextures(1, &output_buffers[i].texture_id);
glBindTexture(GL_TEXTURE_2D, output_buffers[i].texture_id);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
}
glBindTexture(GL_TEXTURE_2D, 0);
gboolean is_es = gdk_gl_context_get_use_es(context);
int major, minor;
gdk_gl_context_get_version(context, &major, &minor);
printf("Initialized %s %d.%d\n",
is_es ? "OpenGL ES" : "OpenGL",
major,
minor);
}
void
mp_process_pipeline_init_gl(GdkSurface *surface)
{
mp_pipeline_invoke(pipeline,
(MPPipelineCallback)init_gl,
&surface,
sizeof(GdkSurface *));
}
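// Debayers a raw frame on the GPU for the preview: the frame is uploaded
// as a luminance texture, run through the GLES2 debayer into a free
// output buffer that is handed to the UI thread, and on the last frame of
// a capture burst the same result is read back to build a thumbnail.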
static GdkTexture *
process_image_for_preview(const uint8_t *image)
{
#ifdef PROFILE_DEBAYER
clock_t t1 = clock();
#endif
// Pick an available buffer
MPProcessPipelineBuffer *output_buffer = NULL;
for (size_t i = 0; i < NUM_BUFFERS; ++i) {
if (output_buffers[i].refcount == 0) {
output_buffer = &output_buffers[i];
}
}
if (output_buffer == NULL) {
return NULL;
}
assert(output_buffer != NULL);
#ifdef RENDERDOC
if (rdoc_api) {
rdoc_api->StartFrameCapture(NULL, NULL);
}
#endif
// Copy image to a GL texture. TODO: This can be avoided
GLuint input_texture;
glGenTextures(1, &input_texture);
glBindTexture(GL_TEXTURE_2D, input_texture);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
glTexImage2D(GL_TEXTURE_2D,
0,
GL_LUMINANCE,
mode.width,
mode.height,
0,
GL_LUMINANCE,
GL_UNSIGNED_BYTE,
image);
check_gl();
gles2_debayer_process(
gles2_debayer, output_buffer->texture_id, input_texture);
check_gl();
glFinish();
glDeleteTextures(1, &input_texture);
#ifdef PROFILE_DEBAYER
clock_t t2 = clock();
printf("process_image_for_preview %fms\n",
(float)(t2 - t1) / CLOCKS_PER_SEC * 1000);
#endif
#ifdef RENDERDOC
if (rdoc_api) {
rdoc_api->EndFrameCapture(NULL, NULL);
}
#endif
mp_process_pipeline_buffer_ref(output_buffer);
mp_main_set_preview(output_buffer);
// Create a thumbnail from the preview for the last capture
GdkTexture *thumb = NULL;
if (captures_remaining == 1) {
printf("Making thumbnail\n");
size_t size = output_buffer_width * output_buffer_height *
sizeof(uint32_t);
uint32_t *data = g_malloc_n(size, 1);
glReadPixels(0,
0,
output_buffer_width,
output_buffer_height,
GL_RGBA,
GL_UNSIGNED_BYTE,
data);
check_gl();
// Flip vertically
for (size_t y = 0; y < output_buffer_height / 2; ++y) {
for (size_t x = 0; x < output_buffer_width; ++x) {
uint32_t tmp = data[(output_buffer_height - y - 1) *
output_buffer_width +
x];
data[(output_buffer_height - y - 1) *
output_buffer_width +
x] = data[y * output_buffer_width + x];
data[y * output_buffer_width + x] = tmp;
}
}
thumb = gdk_memory_texture_new(output_buffer_width,
output_buffer_height,
GDK_MEMORY_R8G8B8A8,
g_bytes_new_take(data, size),
output_buffer_width *
sizeof(uint32_t));
}
return thumb;
}
static void
process_image_for_capture(const uint8_t *image, int count)
{
time_t rawtime;
time(&rawtime);
struct tm tim = *(localtime(&rawtime));
char datetime[20] = { 0 };
strftime(datetime, 20, "%Y:%m:%d %H:%M:%S", &tim);
char fname[255];
sprintf(fname, "%s/%d.dng", burst_dir, count);
TIFF *tif = TIFFOpen(fname, "w");
if (!tif) {
printf("Could not open tiff\n");
return;
}
// Define TIFF thumbnail
TIFFSetField(tif, TIFFTAG_SUBFILETYPE, 1);
TIFFSetField(tif, TIFFTAG_IMAGEWIDTH, mode.width >> 4);
TIFFSetField(tif, TIFFTAG_IMAGELENGTH, mode.height >> 4);
TIFFSetField(tif, TIFFTAG_BITSPERSAMPLE, 8);
TIFFSetField(tif, TIFFTAG_COMPRESSION, COMPRESSION_NONE);
TIFFSetField(tif, TIFFTAG_PHOTOMETRIC, PHOTOMETRIC_RGB);
TIFFSetField(tif, TIFFTAG_MAKE, mp_get_device_make());
TIFFSetField(tif, TIFFTAG_MODEL, mp_get_device_model());
uint16_t orientation;
if (camera_rotation == 0) {
orientation = camera->mirrored ? ORIENTATION_TOPRIGHT :
ORIENTATION_TOPLEFT;
} else if (camera_rotation == 90) {
orientation = camera->mirrored ? ORIENTATION_RIGHTBOT :
ORIENTATION_LEFTBOT;
} else if (camera_rotation == 180) {
orientation = camera->mirrored ? ORIENTATION_BOTLEFT :
ORIENTATION_BOTRIGHT;
} else {
orientation = camera->mirrored ? ORIENTATION_LEFTTOP :
ORIENTATION_RIGHTTOP;
}
TIFFSetField(tif, TIFFTAG_ORIENTATION, orientation);
TIFFSetField(tif, TIFFTAG_DATETIME, datetime);
TIFFSetField(tif, TIFFTAG_SAMPLESPERPIXEL, 3);
TIFFSetField(tif, TIFFTAG_PLANARCONFIG, PLANARCONFIG_CONTIG);
TIFFSetField(tif, TIFFTAG_SOFTWARE, "Megapixels");
long sub_offset = 0;
TIFFSetField(tif, TIFFTAG_SUBIFD, 1, &sub_offset);
TIFFSetField(tif, TIFFTAG_DNGVERSION, "\001\001\0\0");
TIFFSetField(tif, TIFFTAG_DNGBACKWARDVERSION, "\001\0\0\0");
char uniquecameramodel[255];
sprintf(uniquecameramodel,
"%s %s",
mp_get_device_make(),
mp_get_device_model());
TIFFSetField(tif, TIFFTAG_UNIQUECAMERAMODEL, uniquecameramodel);
if (camera->colormatrix[0]) {
TIFFSetField(tif, TIFFTAG_COLORMATRIX1, 9, camera->colormatrix);
} else {
TIFFSetField(tif, TIFFTAG_COLORMATRIX1, 9, colormatrix_srgb);
}
if (camera->forwardmatrix[0]) {
TIFFSetField(tif, TIFFTAG_FORWARDMATRIX1, 9, camera->forwardmatrix);
}
static const float neutral[] = { 1.0, 1.0, 1.0 };
TIFFSetField(tif, TIFFTAG_ASSHOTNEUTRAL, 3, neutral);
TIFFSetField(tif, TIFFTAG_CALIBRATIONILLUMINANT1, 21);
// Write black thumbnail, only windows uses this
{
unsigned char *buf =
(unsigned char *)calloc(1, (mode.width >> 4) * 3);
for (int row = 0; row < (mode.height >> 4); row++) {
TIFFWriteScanline(tif, buf, row, 0);
}
free(buf);
}
TIFFWriteDirectory(tif);
// Define main photo
TIFFSetField(tif, TIFFTAG_SUBFILETYPE, 0);
TIFFSetField(tif, TIFFTAG_IMAGEWIDTH, mode.width);
TIFFSetField(tif, TIFFTAG_IMAGELENGTH, mode.height);
TIFFSetField(tif,
TIFFTAG_BITSPERSAMPLE,
mp_pixel_format_bits_per_pixel(mode.pixel_format));
TIFFSetField(tif, TIFFTAG_PHOTOMETRIC, PHOTOMETRIC_CFA);
TIFFSetField(tif, TIFFTAG_SAMPLESPERPIXEL, 1);
TIFFSetField(tif, TIFFTAG_PLANARCONFIG, PLANARCONFIG_CONTIG);
static const short cfapatterndim[] = { 2, 2 };
TIFFSetField(tif, TIFFTAG_CFAREPEATPATTERNDIM, cfapatterndim);
#if (TIFFLIB_VERSION < 20201219) && !LIBTIFF_CFA_PATTERN
TIFFSetField(tif, TIFFTAG_CFAPATTERN, "\002\001\001\000"); // BGGR
#else
TIFFSetField(tif, TIFFTAG_CFAPATTERN, 4, "\002\001\001\000"); // BGGR
#endif
printf("TIFF version %d\n", TIFFLIB_VERSION);
int whitelevel = camera->whitelevel;
if (!whitelevel) {
whitelevel =
(1 << mp_pixel_format_pixel_depth(mode.pixel_format)) - 1;
}
TIFFSetField(tif, TIFFTAG_WHITELEVEL, 1, &whitelevel);
if (camera->blacklevel) {
const float blacklevel = camera->blacklevel;
TIFFSetField(tif, TIFFTAG_BLACKLEVEL, 1, &blacklevel);
}
TIFFCheckpointDirectory(tif);
printf("Writing frame to %s\n", fname);
for (int row = 0; row < mode.height; row++) {
TIFFWriteScanline(
tif,
(void *)image +
(row * mp_pixel_format_width_to_bytes(
mode.pixel_format, mode.width)),
row,
0);
}
TIFFWriteDirectory(tif);
// Add an EXIF block to the tiff
TIFFCreateEXIFDirectory(tif);
// 1 = manual, 2 = full auto, 3 = aperture priority, 4 = shutter priority
if (!exposure_is_manual) {
TIFFSetField(tif, EXIFTAG_EXPOSUREPROGRAM, 2);
} else {
TIFFSetField(tif, EXIFTAG_EXPOSUREPROGRAM, 1);
}
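	// The sensor exposure is expressed in scanlines, so the shutter time is the
	// frame time scaled by exposure / height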
TIFFSetField(tif,
EXIFTAG_EXPOSURETIME,
(mode.frame_interval.numerator /
(float)mode.frame_interval.denominator) /
((float)mode.height / (float)exposure));
if (camera->iso_min && camera->iso_max) {
uint16_t isospeed = remap(
gain - 1, 0, gain_max, camera->iso_min, camera->iso_max);
TIFFSetField(tif, EXIFTAG_ISOSPEEDRATINGS, 1, &isospeed);
}
TIFFSetField(tif, EXIFTAG_FLASH, 0);
TIFFSetField(tif, EXIFTAG_DATETIMEORIGINAL, datetime);
TIFFSetField(tif, EXIFTAG_DATETIMEDIGITIZED, datetime);
if (camera->fnumber) {
TIFFSetField(tif, EXIFTAG_FNUMBER, camera->fnumber);
}
if (camera->focallength) {
TIFFSetField(tif, EXIFTAG_FOCALLENGTH, camera->focallength);
}
if (camera->focallength && camera->cropfactor) {
TIFFSetField(tif,
EXIFTAG_FOCALLENGTHIN35MMFILM,
(short)(camera->focallength * camera->cropfactor));
}
uint64_t exif_offset = 0;
TIFFWriteCustomDirectory(tif, &exif_offset);
TIFFFreeDirectory(tif);
// Update exif pointer
TIFFSetDirectory(tif, 0);
TIFFSetField(tif, TIFFTAG_EXIFIFD, exif_offset);
TIFFRewriteDirectory(tif);
TIFFClose(tif);
}
static void
post_process_finished(GSubprocess *proc, GAsyncResult *res, GdkTexture *thumb)
{
char *stdout;
g_subprocess_communicate_utf8_finish(proc, res, &stdout, NULL, NULL);
// The last line contains the file name
int end = strlen(stdout);
// Skip the newline at the end
stdout[--end] = '\0';
	char *path = stdout + end - 1;
do {
if (*path == '\n') {
path++;
break;
}
--path;
} while (path > stdout);
mp_main_capture_completed(thumb, path);
}
static void
process_capture_burst(GdkTexture *thumb)
{
time_t rawtime;
time(&rawtime);
struct tm tim = *(localtime(&rawtime));
char timestamp[30];
strftime(timestamp, 30, "%Y%m%d%H%M%S", &tim);
if (g_get_user_special_dir(G_USER_DIRECTORY_PICTURES) != NULL) {
sprintf(capture_fname,
"%s/IMG%s",
g_get_user_special_dir(G_USER_DIRECTORY_PICTURES),
timestamp);
} else if (getenv("XDG_PICTURES_DIR") != NULL) {
sprintf(capture_fname,
"%s/IMG%s",
getenv("XDG_PICTURES_DIR"),
timestamp);
} else {
sprintf(capture_fname,
"%s/Pictures/IMG%s",
getenv("HOME"),
timestamp);
}
char save_dng_s[2] = "0";
if (save_dng) {
save_dng_s[0] = '1';
}
// Start post-processing the captured burst
g_print("Post process %s to %s.ext (save-dng %s)\n",
burst_dir,
capture_fname,
save_dng_s);
g_autoptr(GError) error = NULL;
GSubprocess *proc = g_subprocess_new(G_SUBPROCESS_FLAGS_STDOUT_PIPE,
&error,
processing_script,
burst_dir,
capture_fname,
save_dng_s,
NULL);
if (!proc) {
g_printerr("Failed to spawn postprocess process: %s\n",
error->message);
return;
}
g_subprocess_communicate_utf8_async(
proc, NULL, NULL, (GAsyncReadyCallback)post_process_finished, thumb);
}
static void
process_image(MPPipeline *pipeline, const MPBuffer *buffer)
{
#ifdef PROFILE_PROCESS
clock_t t1 = clock();
#endif
size_t size = mp_pixel_format_width_to_bytes(mode.pixel_format, mode.width) *
mode.height;
uint8_t *image = malloc(size);
memcpy(image, buffer->data, size);
mp_io_pipeline_release_buffer(buffer->index);
MPZBarImage *zbar_image = mp_zbar_image_new(image,
mode.pixel_format,
mode.width,
mode.height,
camera_rotation,
camera->mirrored);
mp_zbar_pipeline_process_image(mp_zbar_image_ref(zbar_image));
#ifdef PROFILE_PROCESS
clock_t t2 = clock();
#endif
GdkTexture *thumb = process_image_for_preview(image);
if (captures_remaining > 0) {
int count = burst_length - captures_remaining;
--captures_remaining;
process_image_for_capture(image, count);
if (captures_remaining == 0) {
assert(thumb);
process_capture_burst(thumb);
} else {
assert(!thumb);
}
} else {
assert(!thumb);
}
mp_zbar_image_unref(zbar_image);
++frames_processed;
if (captures_remaining == 0) {
is_capturing = false;
}
#ifdef PROFILE_PROCESS
clock_t t3 = clock();
printf("process_image %fms, step 1:%fms, step 2:%fms\n",
(float)(t3 - t1) / CLOCKS_PER_SEC * 1000,
(float)(t2 - t1) / CLOCKS_PER_SEC * 1000,
(float)(t3 - t2) / CLOCKS_PER_SEC * 1000);
#endif
}
void
mp_process_pipeline_process_image(MPBuffer buffer)
{
	// If we haven't processed the previous frame yet, drop this one (unless a
	// burst capture is in progress)
if (frames_received != frames_processed && !is_capturing) {
mp_io_pipeline_release_buffer(buffer.index);
return;
}
++frames_received;
mp_pipeline_invoke(pipeline,
(MPPipelineCallback)process_image,
&buffer,
sizeof(MPBuffer));
}
static void
capture()
{
char template[] = "/tmp/megapixels.XXXXXX";
char *tempdir;
tempdir = mkdtemp(template);
if (tempdir == NULL) {
g_printerr("Could not make capture directory %s\n", template);
exit(EXIT_FAILURE);
}
strcpy(burst_dir, tempdir);
captures_remaining = burst_length;
}
void
mp_process_pipeline_capture()
{
is_capturing = true;
mp_pipeline_invoke(pipeline, capture, NULL, 0);
}
static void
on_output_changed()
{
output_buffer_width = mode.width / 2;
output_buffer_height = mode.height / 2;
	if (camera->rotate != 0 && camera->rotate != 180) {
int tmp = output_buffer_width;
output_buffer_width = output_buffer_height;
output_buffer_height = tmp;
}
for (size_t i = 0; i < NUM_BUFFERS; ++i) {
glBindTexture(GL_TEXTURE_2D, output_buffers[i].texture_id);
glTexImage2D(GL_TEXTURE_2D,
0,
GL_RGBA,
output_buffer_width,
output_buffer_height,
0,
GL_RGBA,
GL_UNSIGNED_BYTE,
NULL);
}
glBindTexture(GL_TEXTURE_2D, 0);
gles2_debayer_configure(
gles2_debayer,
output_buffer_width,
output_buffer_height,
mode.width,
mode.height,
camera->rotate,
camera->mirrored,
camera->previewmatrix[0] == 0 ? NULL : camera->previewmatrix,
camera->blacklevel);
}
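// Modulo that always returns a value in [0, b) for positive b, unlike C's '%'
// operator, which can yield a negative result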
static int
mod(int a, int b)
{
int r = a % b;
return r < 0 ? r + b : r;
}
static void
update_state(MPPipeline *pipeline, const struct mp_process_pipeline_state *state)
{
const bool output_changed =
!mp_camera_mode_is_equivalent(&mode, &state->mode) ||
preview_width != state->preview_width ||
preview_height != state->preview_height ||
device_rotation != state->device_rotation;
camera = state->camera;
mode = state->mode;
preview_width = state->preview_width;
preview_height = state->preview_height;
device_rotation = state->device_rotation;
burst_length = state->burst_length;
save_dng = state->save_dng;
// gain_is_manual = state->gain_is_manual;
gain = state->gain;
gain_max = state->gain_max;
exposure_is_manual = state->exposure_is_manual;
exposure = state->exposure;
if (output_changed) {
camera_rotation = mod(camera->rotate - device_rotation, 360);
on_output_changed();
}
struct mp_main_state main_state = {
.camera = camera,
.mode = mode,
.image_width = output_buffer_width,
.image_height = output_buffer_height,
.gain_is_manual = state->gain_is_manual,
.gain = gain,
.gain_max = gain_max,
.exposure_is_manual = exposure_is_manual,
.exposure = exposure,
.has_auto_focus_continuous = state->has_auto_focus_continuous,
.has_auto_focus_start = state->has_auto_focus_start,
};
mp_main_update_state(&main_state);
}
void
mp_process_pipeline_update_state(const struct mp_process_pipeline_state *new_state)
{
mp_pipeline_invoke(pipeline,
(MPPipelineCallback)update_state,
new_state,
sizeof(struct mp_process_pipeline_state));
}
// GTK4 seems to require this
void
pango_fc_font_get_languages()
{
}
megapixels-1.4.3/src/process_pipeline.h 0000664 0000000 0000000 00000002202 14155633267 0020170 0 ustar 00root root 0000000 0000000 #pragma once
#include "camera_config.h"
typedef struct _GdkSurface GdkSurface;
struct mp_process_pipeline_state {
const struct mp_camera_config *camera;
MPCameraMode mode;
int burst_length;
int preview_width;
int preview_height;
int device_rotation;
bool gain_is_manual;
int gain;
int gain_max;
bool exposure_is_manual;
int exposure;
bool has_auto_focus_continuous;
bool has_auto_focus_start;
bool save_dng;
};
void mp_process_pipeline_start();
void mp_process_pipeline_stop();
void mp_process_pipeline_sync();
void mp_process_pipeline_init_gl(GdkSurface *window);
void mp_process_pipeline_process_image(MPBuffer buffer);
void mp_process_pipeline_capture();
void mp_process_pipeline_update_state(const struct mp_process_pipeline_state *state);
typedef struct _MPProcessPipelineBuffer MPProcessPipelineBuffer;
void mp_process_pipeline_buffer_ref(MPProcessPipelineBuffer *buf);
void mp_process_pipeline_buffer_unref(MPProcessPipelineBuffer *buf);
uint32_t mp_process_pipeline_buffer_get_texture_id(MPProcessPipelineBuffer *buf);
megapixels-1.4.3/src/renderdoc/ 0000775 0000000 0000000 00000000000 14155633267 0016425 5 ustar 00root root 0000000 0000000 megapixels-1.4.3/src/renderdoc/app.h 0000664 0000000 0000000 00000070375 14155633267 0017372 0 ustar 00root root 0000000 0000000 /******************************************************************************
* The MIT License (MIT)
*
* Copyright (c) 2019-2021 Baldur Karlsson
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
******************************************************************************/
#pragma once
//////////////////////////////////////////////////////////////////////////////////////////////////
//
// Documentation for the API is available at https://renderdoc.org/docs/in_application_api.html
//
#if !defined(RENDERDOC_NO_STDINT)
#include <stdint.h>
#endif
#if defined(WIN32) || defined(__WIN32__) || defined(_WIN32) || defined(_MSC_VER)
#define RENDERDOC_CC __cdecl
#elif defined(__linux__)
#define RENDERDOC_CC
#elif defined(__APPLE__)
#define RENDERDOC_CC
#else
#error "Unknown platform"
#endif
#ifdef __cplusplus
extern "C" {
#endif
//////////////////////////////////////////////////////////////////////////////////////////////////
// Constants not used directly in below API
// This is a GUID/magic value used for when applications pass a path where shader debug
// information can be found to match up with a stripped shader.
// the define can be used like so: const GUID RENDERDOC_ShaderDebugMagicValue =
// RENDERDOC_ShaderDebugMagicValue_value
#define RENDERDOC_ShaderDebugMagicValue_struct \
{ \
0xeab25520, 0x6670, 0x4865, 0x84, 0x29, 0x6c, 0x8, 0x51, 0x54, 0x00, 0xff \
}
// as an alternative when you want a byte array (assuming x86 endianness):
#define RENDERDOC_ShaderDebugMagicValue_bytearray \
{ \
0x20, 0x55, 0xb2, 0xea, 0x70, 0x66, 0x65, 0x48, 0x84, 0x29, 0x6c, 0x8, 0x51, 0x54, 0x00, 0xff \
}
// truncated version when only a uint64_t is available (e.g. Vulkan tags):
#define RENDERDOC_ShaderDebugMagicValue_truncated 0x48656670eab25520ULL
//////////////////////////////////////////////////////////////////////////////////////////////////
// RenderDoc capture options
//
typedef enum RENDERDOC_CaptureOption {
// Allow the application to enable vsync
//
// Default - enabled
//
// 1 - The application can enable or disable vsync at will
// 0 - vsync is force disabled
eRENDERDOC_Option_AllowVSync = 0,
// Allow the application to enable fullscreen
//
// Default - enabled
//
// 1 - The application can enable or disable fullscreen at will
// 0 - fullscreen is force disabled
eRENDERDOC_Option_AllowFullscreen = 1,
// Record API debugging events and messages
//
// Default - disabled
//
// 1 - Enable built-in API debugging features and records the results into
// the capture, which is matched up with events on replay
// 0 - no API debugging is forcibly enabled
eRENDERDOC_Option_APIValidation = 2,
eRENDERDOC_Option_DebugDeviceMode = 2, // deprecated name of this enum
// Capture CPU callstacks for API events
//
// Default - disabled
//
// 1 - Enables capturing of callstacks
// 0 - no callstacks are captured
eRENDERDOC_Option_CaptureCallstacks = 3,
// When capturing CPU callstacks, only capture them from drawcalls.
// This option does nothing without the above option being enabled
//
// Default - disabled
//
// 1 - Only captures callstacks for drawcall type API events.
// Ignored if CaptureCallstacks is disabled
// 0 - Callstacks, if enabled, are captured for every event.
eRENDERDOC_Option_CaptureCallstacksOnlyDraws = 4,
// Specify a delay in seconds to wait for a debugger to attach, after
// creating or injecting into a process, before continuing to allow it to run.
//
// 0 indicates no delay, and the process will run immediately after injection
//
// Default - 0 seconds
//
eRENDERDOC_Option_DelayForDebugger = 5,
// Verify buffer access. This includes checking the memory returned by a Map() call to
// detect any out-of-bounds modification, as well as initialising buffers with undefined contents
// to a marker value to catch use of uninitialised memory.
//
// NOTE: This option is only valid for OpenGL and D3D11. Explicit APIs such as D3D12 and Vulkan do
// not do the same kind of interception & checking and undefined contents are really undefined.
//
// Default - disabled
//
// 1 - Verify buffer access
// 0 - No verification is performed, and overwriting bounds may cause crashes or corruption in
// RenderDoc.
eRENDERDOC_Option_VerifyBufferAccess = 6,
// The old name for eRENDERDOC_Option_VerifyBufferAccess was eRENDERDOC_Option_VerifyMapWrites.
// This option now controls the filling of uninitialised buffers with 0xdddddddd which was
// previously always enabled
eRENDERDOC_Option_VerifyMapWrites = eRENDERDOC_Option_VerifyBufferAccess,
// Hooks any system API calls that create child processes, and injects
// RenderDoc into them recursively with the same options.
//
// Default - disabled
//
// 1 - Hooks into spawned child processes
// 0 - Child processes are not hooked by RenderDoc
eRENDERDOC_Option_HookIntoChildren = 7,
// By default RenderDoc only includes resources in the final capture necessary
// for that frame, this allows you to override that behaviour.
//
// Default - disabled
//
// 1 - all live resources at the time of capture are included in the capture
// and available for inspection
// 0 - only the resources referenced by the captured frame are included
eRENDERDOC_Option_RefAllResources = 8,
// **NOTE**: As of RenderDoc v1.1 this option has been deprecated. Setting or
// getting it will be ignored, to allow compatibility with older versions.
// In v1.1 the option acts as if it's always enabled.
//
// By default RenderDoc skips saving initial states for resources where the
// previous contents don't appear to be used, assuming that writes before
// reads indicate previous contents aren't used.
//
// Default - disabled
//
// 1 - initial contents at the start of each captured frame are saved, even if
// they are later overwritten or cleared before being used.
// 0 - unless a read is detected, initial contents will not be saved and will
// appear as black or empty data.
eRENDERDOC_Option_SaveAllInitials = 9,
// In APIs that allow for the recording of command lists to be replayed later,
// RenderDoc may choose to not capture command lists before a frame capture is
// triggered, to reduce overheads. This means any command lists recorded once
// and replayed many times will not be available and may cause a failure to
// capture.
//
// NOTE: This is only true for APIs where multithreading is difficult or
// discouraged. Newer APIs like Vulkan and D3D12 will ignore this option
// and always capture all command lists since the API is heavily oriented
// around it and the overheads have been reduced by API design.
//
// 1 - All command lists are captured from the start of the application
// 0 - Command lists are only captured if their recording begins during
// the period when a frame capture is in progress.
eRENDERDOC_Option_CaptureAllCmdLists = 10,
// Mute API debugging output when the API validation mode option is enabled
//
// Default - enabled
//
// 1 - Mute any API debug messages from being displayed or passed through
// 0 - API debugging is displayed as normal
eRENDERDOC_Option_DebugOutputMute = 11,
// Option to allow vendor extensions to be used even when they may be
// incompatible with RenderDoc and cause corrupted replays or crashes.
//
// Default - inactive
//
// No values are documented, this option should only be used when absolutely
// necessary as directed by a RenderDoc developer.
eRENDERDOC_Option_AllowUnsupportedVendorExtensions = 12,
} RENDERDOC_CaptureOption;
// Sets an option that controls how RenderDoc behaves on capture.
//
// Returns 1 if the option and value are valid
// Returns 0 if either is invalid and the option is unchanged
typedef int(RENDERDOC_CC *pRENDERDOC_SetCaptureOptionU32)(RENDERDOC_CaptureOption opt, uint32_t val);
typedef int(RENDERDOC_CC *pRENDERDOC_SetCaptureOptionF32)(RENDERDOC_CaptureOption opt, float val);
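// Illustrative use (assuming `rdoc_api` points at the function table defined later in
// this header):
//   rdoc_api->SetCaptureOptionU32(eRENDERDOC_Option_CaptureCallstacks, 1);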
// Gets the current value of an option as a uint32_t
//
// If the option is invalid, 0xffffffff is returned
typedef uint32_t(RENDERDOC_CC *pRENDERDOC_GetCaptureOptionU32)(RENDERDOC_CaptureOption opt);
// Gets the current value of an option as a float
//
// If the option is invalid, -FLT_MAX is returned
typedef float(RENDERDOC_CC *pRENDERDOC_GetCaptureOptionF32)(RENDERDOC_CaptureOption opt);
typedef enum RENDERDOC_InputButton {
// '0' - '9' matches ASCII values
eRENDERDOC_Key_0 = 0x30,
eRENDERDOC_Key_1 = 0x31,
eRENDERDOC_Key_2 = 0x32,
eRENDERDOC_Key_3 = 0x33,
eRENDERDOC_Key_4 = 0x34,
eRENDERDOC_Key_5 = 0x35,
eRENDERDOC_Key_6 = 0x36,
eRENDERDOC_Key_7 = 0x37,
eRENDERDOC_Key_8 = 0x38,
eRENDERDOC_Key_9 = 0x39,
// 'A' - 'Z' matches ASCII values
eRENDERDOC_Key_A = 0x41,
eRENDERDOC_Key_B = 0x42,
eRENDERDOC_Key_C = 0x43,
eRENDERDOC_Key_D = 0x44,
eRENDERDOC_Key_E = 0x45,
eRENDERDOC_Key_F = 0x46,
eRENDERDOC_Key_G = 0x47,
eRENDERDOC_Key_H = 0x48,
eRENDERDOC_Key_I = 0x49,
eRENDERDOC_Key_J = 0x4A,
eRENDERDOC_Key_K = 0x4B,
eRENDERDOC_Key_L = 0x4C,
eRENDERDOC_Key_M = 0x4D,
eRENDERDOC_Key_N = 0x4E,
eRENDERDOC_Key_O = 0x4F,
eRENDERDOC_Key_P = 0x50,
eRENDERDOC_Key_Q = 0x51,
eRENDERDOC_Key_R = 0x52,
eRENDERDOC_Key_S = 0x53,
eRENDERDOC_Key_T = 0x54,
eRENDERDOC_Key_U = 0x55,
eRENDERDOC_Key_V = 0x56,
eRENDERDOC_Key_W = 0x57,
eRENDERDOC_Key_X = 0x58,
eRENDERDOC_Key_Y = 0x59,
eRENDERDOC_Key_Z = 0x5A,
// leave the rest of the ASCII range free
// in case we want to use it later
eRENDERDOC_Key_NonPrintable = 0x100,
eRENDERDOC_Key_Divide,
eRENDERDOC_Key_Multiply,
eRENDERDOC_Key_Subtract,
eRENDERDOC_Key_Plus,
eRENDERDOC_Key_F1,
eRENDERDOC_Key_F2,
eRENDERDOC_Key_F3,
eRENDERDOC_Key_F4,
eRENDERDOC_Key_F5,
eRENDERDOC_Key_F6,
eRENDERDOC_Key_F7,
eRENDERDOC_Key_F8,
eRENDERDOC_Key_F9,
eRENDERDOC_Key_F10,
eRENDERDOC_Key_F11,
eRENDERDOC_Key_F12,
eRENDERDOC_Key_Home,
eRENDERDOC_Key_End,
eRENDERDOC_Key_Insert,
eRENDERDOC_Key_Delete,
eRENDERDOC_Key_PageUp,
eRENDERDOC_Key_PageDn,
eRENDERDOC_Key_Backspace,
eRENDERDOC_Key_Tab,
eRENDERDOC_Key_PrtScrn,
eRENDERDOC_Key_Pause,
eRENDERDOC_Key_Max,
} RENDERDOC_InputButton;
// Sets which key or keys can be used to toggle focus between multiple windows
//
// If keys is NULL or num is 0, toggle keys will be disabled
typedef void(RENDERDOC_CC *pRENDERDOC_SetFocusToggleKeys)(RENDERDOC_InputButton *keys, int num);
// Sets which key or keys can be used to capture the next frame
//
// If keys is NULL or num is 0, captures keys will be disabled
typedef void(RENDERDOC_CC *pRENDERDOC_SetCaptureKeys)(RENDERDOC_InputButton *keys, int num);
typedef enum RENDERDOC_OverlayBits {
// This single bit controls whether the overlay is enabled or disabled globally
eRENDERDOC_Overlay_Enabled = 0x1,
// Show the average framerate over several seconds as well as min/max
eRENDERDOC_Overlay_FrameRate = 0x2,
// Show the current frame number
eRENDERDOC_Overlay_FrameNumber = 0x4,
// Show a list of recent captures, and how many captures have been made
eRENDERDOC_Overlay_CaptureList = 0x8,
// Default values for the overlay mask
eRENDERDOC_Overlay_Default = (eRENDERDOC_Overlay_Enabled | eRENDERDOC_Overlay_FrameRate |
eRENDERDOC_Overlay_FrameNumber | eRENDERDOC_Overlay_CaptureList),
// Enable all bits
eRENDERDOC_Overlay_All = ~0U,
// Disable all bits
eRENDERDOC_Overlay_None = 0,
} RENDERDOC_OverlayBits;
// returns the overlay bits that have been set
typedef uint32_t(RENDERDOC_CC *pRENDERDOC_GetOverlayBits)();
// sets the overlay bits with an and & or mask
typedef void(RENDERDOC_CC *pRENDERDOC_MaskOverlayBits)(uint32_t And, uint32_t Or);
// this function will attempt to remove RenderDoc's hooks in the application.
//
// Note: that this can only work correctly if done immediately after
// the module is loaded, before any API work happens. RenderDoc will remove its
// injected hooks and shut down. Behaviour is undefined if this is called
// after any API functions have been called, and there is still no guarantee of
// success.
typedef void(RENDERDOC_CC *pRENDERDOC_RemoveHooks)();
// DEPRECATED: compatibility for code compiled against pre-1.4.1 headers.
typedef pRENDERDOC_RemoveHooks pRENDERDOC_Shutdown;
// This function will unload RenderDoc's crash handler.
//
// If you use your own crash handler and don't want RenderDoc's handler to
// intercede, you can call this function to unload it and any unhandled
// exceptions will pass to the next handler.
typedef void(RENDERDOC_CC *pRENDERDOC_UnloadCrashHandler)();
// Sets the capture file path template
//
// pathtemplate is a UTF-8 string that gives a template for how captures will be named
// and where they will be saved.
//
// Any extension is stripped off the path, and captures are saved in the directory
// specified, and named with the filename and the frame number appended. If the
// directory does not exist it will be created, including any parent directories.
//
// If pathtemplate is NULL, the template will remain unchanged
//
// Example:
//
// SetCaptureFilePathTemplate("my_captures/example");
//
// Capture #1 -> my_captures/example_frame123.rdc
// Capture #2 -> my_captures/example_frame456.rdc
typedef void(RENDERDOC_CC *pRENDERDOC_SetCaptureFilePathTemplate)(const char *pathtemplate);
// returns the current capture path template, see SetCaptureFileTemplate above, as a UTF-8 string
typedef const char *(RENDERDOC_CC *pRENDERDOC_GetCaptureFilePathTemplate)();
// DEPRECATED: compatibility for code compiled against pre-1.1.2 headers.
typedef pRENDERDOC_SetCaptureFilePathTemplate pRENDERDOC_SetLogFilePathTemplate;
typedef pRENDERDOC_GetCaptureFilePathTemplate pRENDERDOC_GetLogFilePathTemplate;
// returns the number of captures that have been made
typedef uint32_t(RENDERDOC_CC *pRENDERDOC_GetNumCaptures)();
// This function returns the details of a capture, by index. New captures are added
// to the end of the list.
//
// filename will be filled with the absolute path to the capture file, as a UTF-8 string
// pathlength will be written with the length in bytes of the filename string
// timestamp will be written with the time of the capture, in seconds since the Unix epoch
//
// Any of the parameters can be NULL and they'll be skipped.
//
// The function will return 1 if the capture index is valid, or 0 if the index is invalid
// If the index is invalid, the values will be unchanged
//
// Note: when captures are deleted in the UI they will remain in this list, so the
// capture path may not exist anymore.
typedef uint32_t(RENDERDOC_CC *pRENDERDOC_GetCapture)(uint32_t idx, char *filename,
uint32_t *pathlength, uint64_t *timestamp);
// Sets the comments associated with a capture file. These comments are displayed in the
// UI program when opening.
//
// filePath should be a path to the capture file to add comments to. If set to NULL or ""
// the most recent capture file created will be used instead.
// comments should be a NULL-terminated UTF-8 string to add as comments.
//
// Any existing comments will be overwritten.
typedef void(RENDERDOC_CC *pRENDERDOC_SetCaptureFileComments)(const char *filePath,
const char *comments);
// returns 1 if the RenderDoc UI is connected to this application, 0 otherwise
typedef uint32_t(RENDERDOC_CC *pRENDERDOC_IsTargetControlConnected)();
// DEPRECATED: compatibility for code compiled against pre-1.1.1 headers.
// This was renamed to IsTargetControlConnected in API 1.1.1, the old typedef is kept here for
// backwards compatibility with old code, it is castable either way since it's ABI compatible
// as the same function pointer type.
typedef pRENDERDOC_IsTargetControlConnected pRENDERDOC_IsRemoteAccessConnected;
// This function will launch the Replay UI associated with the RenderDoc library injected
// into the running application.
//
// if connectTargetControl is 1, the Replay UI will be launched with a command line parameter
// to connect to this application
// cmdline is the rest of the command line, as a UTF-8 string. E.g. a capture file to open
// if cmdline is NULL, the command line will be empty.
//
// returns the PID of the replay UI if successful, 0 if not successful.
typedef uint32_t(RENDERDOC_CC *pRENDERDOC_LaunchReplayUI)(uint32_t connectTargetControl,
const char *cmdline);
// RenderDoc can return a higher version than requested if it's backwards compatible,
// this function returns the actual version returned. If a parameter is NULL, it will be
// ignored and the others will be filled out.
typedef void(RENDERDOC_CC *pRENDERDOC_GetAPIVersion)(int *major, int *minor, int *patch);
//////////////////////////////////////////////////////////////////////////
// Capturing functions
//
// A device pointer is a pointer to the API's root handle.
//
// This would be an ID3D11Device, HGLRC/GLXContext, ID3D12Device, etc
typedef void *RENDERDOC_DevicePointer;
// A window handle is the OS's native window handle
//
// This would be an HWND, GLXDrawable, etc
typedef void *RENDERDOC_WindowHandle;
// A helper macro for Vulkan, where the device handle cannot be used directly.
//
// Passing the VkInstance to this macro will return the RENDERDOC_DevicePointer to use.
//
// Specifically, the value needed is the dispatch table pointer, which sits as the first
// pointer-sized object in the memory pointed to by the VkInstance. Thus we cast to a void** and
// indirect once.
#define RENDERDOC_DEVICEPOINTER_FROM_VKINSTANCE(inst) (*((void **)(inst)))
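// Illustrative use with a Vulkan instance named `instance` (names assumed, not part of
// this header):
//   rdoc_api->StartFrameCapture(RENDERDOC_DEVICEPOINTER_FROM_VKINSTANCE(instance), NULL);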
// This sets the RenderDoc in-app overlay in the API/window pair as 'active' and it will
// respond to keypresses. Neither parameter can be NULL
typedef void(RENDERDOC_CC *pRENDERDOC_SetActiveWindow)(RENDERDOC_DevicePointer device,
RENDERDOC_WindowHandle wndHandle);
// capture the next frame on whichever window and API is currently considered active
typedef void(RENDERDOC_CC *pRENDERDOC_TriggerCapture)();
// capture the next N frames on whichever window and API is currently considered active
typedef void(RENDERDOC_CC *pRENDERDOC_TriggerMultiFrameCapture)(uint32_t numFrames);
// When choosing either a device pointer or a window handle to capture, you can pass NULL.
// Passing NULL specifies a 'wildcard' match against anything. This allows you to specify
// any API rendering to a specific window, or a specific API instance rendering to any window,
// or in the simplest case of one window and one API, you can just pass NULL for both.
//
// In either case, if there are two or more possible matching (device,window) pairs it
// is undefined which one will be captured.
//
// Note: for headless rendering you can pass NULL for the window handle and either specify
// a device pointer or leave it NULL as above.
// Immediately starts capturing API calls on the specified device pointer and window handle.
//
// If there is no matching thing to capture (e.g. no supported API has been initialised),
// this will do nothing.
//
// The results are undefined (including crashes) if two captures are started overlapping,
// even on separate devices and/or windows.
typedef void(RENDERDOC_CC *pRENDERDOC_StartFrameCapture)(RENDERDOC_DevicePointer device,
RENDERDOC_WindowHandle wndHandle);
// Returns whether or not a frame capture is currently ongoing anywhere.
//
// This will return 1 if a capture is ongoing, and 0 if there is no capture running
typedef uint32_t(RENDERDOC_CC *pRENDERDOC_IsFrameCapturing)();
// Ends capturing immediately.
//
// This will return 1 if the capture succeeded, and 0 if there was an error capturing.
typedef uint32_t(RENDERDOC_CC *pRENDERDOC_EndFrameCapture)(RENDERDOC_DevicePointer device,
RENDERDOC_WindowHandle wndHandle);
// Ends capturing immediately and discard any data stored without saving to disk.
//
// This will return 1 if the capture was discarded, and 0 if there was an error or no capture
// was in progress
typedef uint32_t(RENDERDOC_CC *pRENDERDOC_DiscardFrameCapture)(RENDERDOC_DevicePointer device,
RENDERDOC_WindowHandle wndHandle);
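// Illustrative pairing (assuming `rdoc_api` was obtained via RENDERDOC_GetAPI below,
// using the NULL/NULL wildcard described above):
//
//   rdoc_api->StartFrameCapture(NULL, NULL);
//   // ... issue the API work for the frame to capture ...
//   rdoc_api->EndFrameCapture(NULL, NULL);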
//////////////////////////////////////////////////////////////////////////////////////////////////
// RenderDoc API versions
//
// RenderDoc uses semantic versioning (http://semver.org/).
//
// MAJOR version is incremented when incompatible API changes happen.
// MINOR version is incremented when functionality is added in a backwards-compatible manner.
// PATCH version is incremented when backwards-compatible bug fixes happen.
//
// Note that this means the API returned can be higher than the one you might have requested.
// e.g. if you are running against a newer RenderDoc that supports 1.0.1, it will be returned
// instead of 1.0.0. You can check this with the GetAPIVersion entry point
typedef enum RENDERDOC_Version {
eRENDERDOC_API_Version_1_0_0 = 10000, // RENDERDOC_API_1_0_0 = 1 00 00
eRENDERDOC_API_Version_1_0_1 = 10001, // RENDERDOC_API_1_0_1 = 1 00 01
eRENDERDOC_API_Version_1_0_2 = 10002, // RENDERDOC_API_1_0_2 = 1 00 02
eRENDERDOC_API_Version_1_1_0 = 10100, // RENDERDOC_API_1_1_0 = 1 01 00
eRENDERDOC_API_Version_1_1_1 = 10101, // RENDERDOC_API_1_1_1 = 1 01 01
eRENDERDOC_API_Version_1_1_2 = 10102, // RENDERDOC_API_1_1_2 = 1 01 02
eRENDERDOC_API_Version_1_2_0 = 10200, // RENDERDOC_API_1_2_0 = 1 02 00
eRENDERDOC_API_Version_1_3_0 = 10300, // RENDERDOC_API_1_3_0 = 1 03 00
eRENDERDOC_API_Version_1_4_0 = 10400, // RENDERDOC_API_1_4_0 = 1 04 00
eRENDERDOC_API_Version_1_4_1 = 10401, // RENDERDOC_API_1_4_1 = 1 04 01
} RENDERDOC_Version;
// API version changelog:
//
// 1.0.0 - initial release
// 1.0.1 - Bugfix: IsFrameCapturing() was returning false for captures that were triggered
// by keypress or TriggerCapture, instead of Start/EndFrameCapture.
// 1.0.2 - Refactor: Renamed eRENDERDOC_Option_DebugDeviceMode to eRENDERDOC_Option_APIValidation
// 1.1.0 - Add feature: TriggerMultiFrameCapture(). Backwards compatible with 1.0.x since the new
// function pointer is added to the end of the struct, the original layout is identical
// 1.1.1 - Refactor: Renamed remote access to target control (to better disambiguate from remote
// replay/remote server concept in replay UI)
// 1.1.2 - Refactor: Renamed "log file" in function names to just capture, to clarify that these
// are captures and not debug logging files. This is the first API version in the v1.0
// branch.
// 1.2.0 - Added feature: SetCaptureFileComments() to add comments to a capture file that will be
// displayed in the UI program on load.
// 1.3.0 - Added feature: New capture option eRENDERDOC_Option_AllowUnsupportedVendorExtensions
// which allows users to opt-in to allowing unsupported vendor extensions to function.
// Should be used at the user's own risk.
// Refactor: Renamed eRENDERDOC_Option_VerifyMapWrites to
// eRENDERDOC_Option_VerifyBufferAccess, which now also controls initialisation to
// 0xdddddddd of uninitialised buffer contents.
// 1.4.0 - Added feature: DiscardFrameCapture() to discard a frame capture in progress and stop
// capturing without saving anything to disk.
// 1.4.1 - Refactor: Renamed Shutdown to RemoveHooks to better clarify what is happening
typedef struct RENDERDOC_API_1_4_1
{
pRENDERDOC_GetAPIVersion GetAPIVersion;
pRENDERDOC_SetCaptureOptionU32 SetCaptureOptionU32;
pRENDERDOC_SetCaptureOptionF32 SetCaptureOptionF32;
pRENDERDOC_GetCaptureOptionU32 GetCaptureOptionU32;
pRENDERDOC_GetCaptureOptionF32 GetCaptureOptionF32;
pRENDERDOC_SetFocusToggleKeys SetFocusToggleKeys;
pRENDERDOC_SetCaptureKeys SetCaptureKeys;
pRENDERDOC_GetOverlayBits GetOverlayBits;
pRENDERDOC_MaskOverlayBits MaskOverlayBits;
// Shutdown was renamed to RemoveHooks in 1.4.1.
// These unions allow old code to continue compiling without changes
union
{
pRENDERDOC_Shutdown Shutdown;
pRENDERDOC_RemoveHooks RemoveHooks;
};
pRENDERDOC_UnloadCrashHandler UnloadCrashHandler;
// Get/SetLogFilePathTemplate was renamed to Get/SetCaptureFilePathTemplate in 1.1.2.
// These unions allow old code to continue compiling without changes
union
{
// deprecated name
pRENDERDOC_SetLogFilePathTemplate SetLogFilePathTemplate;
// current name
pRENDERDOC_SetCaptureFilePathTemplate SetCaptureFilePathTemplate;
};
union
{
// deprecated name
pRENDERDOC_GetLogFilePathTemplate GetLogFilePathTemplate;
// current name
pRENDERDOC_GetCaptureFilePathTemplate GetCaptureFilePathTemplate;
};
pRENDERDOC_GetNumCaptures GetNumCaptures;
pRENDERDOC_GetCapture GetCapture;
pRENDERDOC_TriggerCapture TriggerCapture;
// IsRemoteAccessConnected was renamed to IsTargetControlConnected in 1.1.1.
// This union allows old code to continue compiling without changes
union
{
// deprecated name
pRENDERDOC_IsRemoteAccessConnected IsRemoteAccessConnected;
// current name
pRENDERDOC_IsTargetControlConnected IsTargetControlConnected;
};
pRENDERDOC_LaunchReplayUI LaunchReplayUI;
pRENDERDOC_SetActiveWindow SetActiveWindow;
pRENDERDOC_StartFrameCapture StartFrameCapture;
pRENDERDOC_IsFrameCapturing IsFrameCapturing;
pRENDERDOC_EndFrameCapture EndFrameCapture;
// new function in 1.1.0
pRENDERDOC_TriggerMultiFrameCapture TriggerMultiFrameCapture;
// new function in 1.2.0
pRENDERDOC_SetCaptureFileComments SetCaptureFileComments;
// new function in 1.4.0
pRENDERDOC_DiscardFrameCapture DiscardFrameCapture;
} RENDERDOC_API_1_4_1;
typedef RENDERDOC_API_1_4_1 RENDERDOC_API_1_0_0;
typedef RENDERDOC_API_1_4_1 RENDERDOC_API_1_0_1;
typedef RENDERDOC_API_1_4_1 RENDERDOC_API_1_0_2;
typedef RENDERDOC_API_1_4_1 RENDERDOC_API_1_1_0;
typedef RENDERDOC_API_1_4_1 RENDERDOC_API_1_1_1;
typedef RENDERDOC_API_1_4_1 RENDERDOC_API_1_1_2;
typedef RENDERDOC_API_1_4_1 RENDERDOC_API_1_2_0;
typedef RENDERDOC_API_1_4_1 RENDERDOC_API_1_3_0;
typedef RENDERDOC_API_1_4_1 RENDERDOC_API_1_4_0;
//////////////////////////////////////////////////////////////////////////////////////////////////
// RenderDoc API entry point
//
// This entry point can be obtained via GetProcAddress/dlsym if RenderDoc is available.
//
// The name is the same as the typedef - "RENDERDOC_GetAPI"
//
// This function is not thread safe, and should not be called on multiple threads at once.
// Ideally, call this once as early as possible in your application's startup, before doing
// any API work, since some configuration functionality etc has to be done also before
// initialising any APIs.
//
// Parameters:
// version is a single value from the RENDERDOC_Version above.
//
// outAPIPointers will be filled out with a pointer to the corresponding struct of function
// pointers.
//
// Returns:
// 1 - if the outAPIPointers has been filled with a pointer to the API struct requested
// 0 - if the requested version is not supported or the arguments are invalid.
//
typedef int(RENDERDOC_CC *pRENDERDOC_GetAPI)(RENDERDOC_Version version, void **outAPIPointers);
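// A minimal usage sketch (not part of the API itself; assumes POSIX <dlfcn.h> and that
// the module is named librenderdoc.so on Linux):
//
//   RENDERDOC_API_1_4_1 *rdoc_api = NULL;
//   void *mod = dlopen("librenderdoc.so", RTLD_NOW | RTLD_NOLOAD);
//   if (mod) {
//           pRENDERDOC_GetAPI RENDERDOC_GetAPI =
//                   (pRENDERDOC_GetAPI)dlsym(mod, "RENDERDOC_GetAPI");
//           int ret = RENDERDOC_GetAPI(eRENDERDOC_API_Version_1_4_1,
//                                      (void **)&rdoc_api);
//           // ret == 1 on success
//   }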
#ifdef __cplusplus
} // extern "C"
#endif
megapixels-1.4.3/src/zbar_pipeline.c 0000664 0000000 0000000 00000017263 14155633267 0017460 0 ustar 00root root 0000000 0000000 #include "zbar_pipeline.h"
#include "io_pipeline.h"
#include "main.h"
#include "pipeline.h"
#include <assert.h>
#include <zbar.h>
struct _MPZBarImage {
uint8_t *data;
MPPixelFormat pixel_format;
int width;
int height;
int rotation;
bool mirrored;
_Atomic int ref_count;
};
static MPPipeline *pipeline;
static volatile int frames_processed = 0;
static volatile int frames_received = 0;
static zbar_image_scanner_t *scanner;
static void
setup(MPPipeline *pipeline, const void *data)
{
scanner = zbar_image_scanner_create();
zbar_image_scanner_set_config(scanner, 0, ZBAR_CFG_ENABLE, 1);
}
void
mp_zbar_pipeline_start()
{
pipeline = mp_pipeline_new();
mp_pipeline_invoke(pipeline, setup, NULL, 0);
}
void
mp_zbar_pipeline_stop()
{
mp_pipeline_free(pipeline);
}
static bool
is_3d_code(zbar_symbol_type_t type)
{
switch (type) {
case ZBAR_EAN2:
case ZBAR_EAN5:
case ZBAR_EAN8:
case ZBAR_UPCE:
case ZBAR_ISBN10:
case ZBAR_UPCA:
case ZBAR_EAN13:
case ZBAR_ISBN13:
case ZBAR_I25:
case ZBAR_DATABAR:
case ZBAR_DATABAR_EXP:
case ZBAR_CODABAR:
case ZBAR_CODE39:
case ZBAR_CODE93:
case ZBAR_CODE128:
return false;
case ZBAR_COMPOSITE:
case ZBAR_PDF417:
case ZBAR_QRCODE:
case ZBAR_SQCODE:
return true;
default:
return false;
}
}
static inline void
map_coords(int *x, int *y, int width, int height, int rotation, bool mirrored)
{
int x_r, y_r;
if (rotation == 0) {
x_r = *x;
y_r = *y;
} else if (rotation == 90) {
x_r = *y;
y_r = height - *x - 1;
} else if (rotation == 270) {
x_r = width - *y - 1;
y_r = *x;
} else {
x_r = width - *x - 1;
y_r = height - *y - 1;
}
if (mirrored) {
x_r = width - x_r - 1;
}
*x = x_r;
*y = y_r;
}
static MPZBarCode
process_symbol(const MPZBarImage *image,
int width,
int height,
const zbar_symbol_t *symbol)
{
if (image->rotation == 90 || image->rotation == 270) {
int tmp = width;
width = height;
height = tmp;
}
MPZBarCode code;
unsigned loc_size = zbar_symbol_get_loc_size(symbol);
assert(loc_size > 0);
zbar_symbol_type_t type = zbar_symbol_get_type(symbol);
if (is_3d_code(type) && loc_size == 4) {
for (unsigned i = 0; i < loc_size; ++i) {
code.bounds_x[i] = zbar_symbol_get_loc_x(symbol, i);
code.bounds_y[i] = zbar_symbol_get_loc_y(symbol, i);
}
} else {
int min_x = zbar_symbol_get_loc_x(symbol, 0);
int min_y = zbar_symbol_get_loc_y(symbol, 0);
int max_x = min_x, max_y = min_y;
for (unsigned i = 1; i < loc_size; ++i) {
int x = zbar_symbol_get_loc_x(symbol, i);
int y = zbar_symbol_get_loc_y(symbol, i);
min_x = MIN(min_x, x);
min_y = MIN(min_y, y);
max_x = MAX(max_x, x);
max_y = MAX(max_y, y);
}
code.bounds_x[0] = min_x;
code.bounds_y[0] = min_y;
code.bounds_x[1] = max_x;
code.bounds_y[1] = min_y;
code.bounds_x[2] = max_x;
code.bounds_y[2] = max_y;
code.bounds_x[3] = min_x;
code.bounds_y[3] = max_y;
}
for (uint8_t i = 0; i < 4; ++i) {
map_coords(&code.bounds_x[i],
&code.bounds_y[i],
width,
height,
image->rotation,
image->mirrored);
}
const char *data = zbar_symbol_get_data(symbol);
unsigned int data_size = zbar_symbol_get_data_length(symbol);
code.type = zbar_get_symbol_name(type);
	// strndup() copies at most data_size bytes and NUL-terminates the result
	code.data = strndup(data, data_size);
return code;
}
static void
process_image(MPPipeline *pipeline, MPZBarImage **_image)
{
MPZBarImage *image = *_image;
assert(image->pixel_format == MP_PIXEL_FMT_BGGR8 ||
image->pixel_format == MP_PIXEL_FMT_GBRG8 ||
image->pixel_format == MP_PIXEL_FMT_GRBG8 ||
image->pixel_format == MP_PIXEL_FMT_RGGB8);
	// Create a grayscale image for scanning by taking one sample from each
	// 2x2 Bayer block; rotation and mirroring are applied to the detected
	// symbol coordinates later in process_symbol().
int width = image->width / 2;
int height = image->height / 2;
uint8_t *data = malloc(width * height * sizeof(uint8_t));
size_t i = 0;
for (int y = 0; y < image->height; y += 2) {
for (int x = 0; x < image->width; x += 2) {
data[i++] = image->data[x + image->width * y];
}
}
// Create image for zbar
zbar_image_t *zbar_image = zbar_image_create();
zbar_image_set_format(zbar_image, zbar_fourcc('Y', '8', '0', '0'));
zbar_image_set_size(zbar_image, width, height);
zbar_image_set_data(zbar_image,
data,
width * height * sizeof(uint8_t),
zbar_image_free_data);
int res = zbar_scan_image(scanner, zbar_image);
assert(res >= 0);
if (res > 0) {
MPZBarScanResult *result = malloc(sizeof(MPZBarScanResult));
result->size = res;
const zbar_symbol_t *symbol = zbar_image_first_symbol(zbar_image);
for (int i = 0; i < MIN(res, 8); ++i) {
assert(symbol != NULL);
result->codes[i] =
process_symbol(image, width, height, symbol);
symbol = zbar_symbol_next(symbol);
}
mp_main_set_zbar_result(result);
} else {
mp_main_set_zbar_result(NULL);
}
zbar_image_destroy(zbar_image);
mp_zbar_image_unref(image);
++frames_processed;
}
void
mp_zbar_pipeline_process_image(MPZBarImage *image)
{
// If we haven't processed the previous frame yet, drop this one
if (frames_received != frames_processed) {
mp_zbar_image_unref(image);
return;
}
++frames_received;
mp_pipeline_invoke(pipeline,
(MPPipelineCallback)process_image,
&image,
sizeof(MPZBarImage *));
}
MPZBarImage *
mp_zbar_image_new(uint8_t *data,
MPPixelFormat pixel_format,
int width,
int height,
int rotation,
bool mirrored)
{
MPZBarImage *image = malloc(sizeof(MPZBarImage));
image->data = data;
image->pixel_format = pixel_format;
image->width = width;
image->height = height;
image->rotation = rotation;
image->mirrored = mirrored;
image->ref_count = 1;
return image;
}
MPZBarImage *
mp_zbar_image_ref(MPZBarImage *image)
{
++image->ref_count;
return image;
}
void
mp_zbar_image_unref(MPZBarImage *image)
{
if (--image->ref_count == 0) {
free(image->data);
free(image);
}
}
megapixels-1.4.3/src/zbar_pipeline.h 0000664 0000000 0000000 00000001441 14155633267 0017454 0 ustar 00root root 0000000 0000000 #pragma once
#include "camera_config.h"
typedef struct _MPZBarImage MPZBarImage;
typedef struct {
int bounds_x[4];
int bounds_y[4];
char *data;
const char *type;
} MPZBarCode;
typedef struct {
MPZBarCode codes[8];
uint8_t size;
} MPZBarScanResult;
void mp_zbar_pipeline_start();
void mp_zbar_pipeline_stop();
void mp_zbar_pipeline_process_image(MPZBarImage *image);
MPZBarImage *mp_zbar_image_new(uint8_t *data,
MPPixelFormat pixel_format,
int width,
int height,
int rotation,
bool mirrored);
MPZBarImage *mp_zbar_image_ref(MPZBarImage *image);
void mp_zbar_image_unref(MPZBarImage *image);
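// Typical usage sketch (mirrors process_pipeline.c): wrap a captured buffer, hand a
// reference to the scanner thread, then drop the local reference:
//
//   MPZBarImage *img = mp_zbar_image_new(data, pixel_format, width, height,
//                                        rotation, mirrored);
//   mp_zbar_pipeline_process_image(mp_zbar_image_ref(img));
//   mp_zbar_image_unref(img);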
megapixels-1.4.3/tools/ 0000775 0000000 0000000 00000000000 14155633267 0015031 5 ustar 00root root 0000000 0000000 megapixels-1.4.3/tools/camera_test.c 0000664 0000000 0000000 00000016405 14155633267 0017472 0 ustar 00root root 0000000 0000000 #include "camera.h"
#include "device.h"
#include <fcntl.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <sys/time.h>
#include <unistd.h>
double
get_time()
{
struct timeval t;
struct timezone tzp;
gettimeofday(&t, &tzp);
return t.tv_sec + t.tv_usec * 1e-6;
}
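// Manual test tool: finds the given media device, optionally links a sub-device to the
// video interface, lists the available controls and modes, and test-captures ten frames
// per mode while timing each step.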
int
main(int argc, char *argv[])
{
if (argc != 2 && argc != 3) {
printf("Usage: %s []\n",
argv[0]);
return 1;
}
char *video_name = argv[1];
char *subdev_name = NULL;
if (argc == 3) {
subdev_name = argv[2];
}
double find_start = get_time();
// First find the device
MPDevice *device = mp_device_find(video_name);
if (!device) {
printf("Device not found\n");
return 1;
}
double find_end = get_time();
printf("Finding the device took %fms\n", (find_end - find_start) * 1000);
int video_fd;
uint32_t video_entity_id;
{
const struct media_v2_entity *entity =
mp_device_find_entity(device, video_name);
if (!entity) {
printf("Unable to find video device interface\n");
return 1;
}
video_entity_id = entity->id;
const struct media_v2_interface *iface =
mp_device_find_entity_interface(device, video_entity_id);
char buf[256];
if (!mp_find_device_path(iface->devnode, buf, 256)) {
printf("Unable to find video device path\n");
return 1;
}
video_fd = open(buf, O_RDWR);
if (video_fd == -1) {
printf("Unable to open video device\n");
return 1;
}
}
int subdev_fd = -1;
if (subdev_name) {
const struct media_v2_entity *entity =
mp_device_find_entity(device, subdev_name);
if (!entity) {
printf("Unable to find sub-device\n");
return 1;
}
const struct media_v2_pad *source_pad =
mp_device_get_pad_from_entity(device, entity->id);
const struct media_v2_pad *sink_pad =
mp_device_get_pad_from_entity(device, video_entity_id);
// Disable other links
const struct media_v2_entity *entities =
mp_device_get_entities(device);
for (int i = 0; i < mp_device_get_num_entities(device); ++i) {
if (entities[i].id != video_entity_id &&
entities[i].id != entity->id) {
const struct media_v2_pad *pad =
mp_device_get_pad_from_entity(
device, entities[i].id);
mp_device_setup_link(
device, pad->id, sink_pad->id, false);
}
}
// Then enable ours
mp_device_setup_link(device, source_pad->id, sink_pad->id, true);
const struct media_v2_interface *iface =
mp_device_find_entity_interface(device, entity->id);
char buf[256];
if (!mp_find_device_path(iface->devnode, buf, 256)) {
printf("Unable to find sub-device path\n");
return 1;
}
subdev_fd = open(buf, O_RDWR);
if (subdev_fd == -1) {
printf("Unable to open sub-device\n");
return 1;
}
}
double open_end = get_time();
printf("Opening the device took %fms\n", (open_end - find_end) * 1000);
MPCamera *camera = mp_camera_new(video_fd, subdev_fd);
MPControlList *controls = mp_camera_list_controls(camera);
double control_list_end = get_time();
printf("Available controls: (took %fms)\n",
(control_list_end - open_end) * 1000);
for (MPControlList *list = controls; list;
list = mp_control_list_next(list)) {
MPControl *c = mp_control_list_get(list);
printf(" %32s id:%s type:%s default:%d\n",
c->name,
mp_control_id_to_str(c->id),
mp_control_type_to_str(c->type),
c->default_value);
}
double mode_list_begin = get_time();
MPCameraModeList *modes = mp_camera_list_available_modes(camera);
double mode_list_end = get_time();
printf("Available modes: (took %fms)\n",
(mode_list_end - mode_list_begin) * 1000);
for (MPCameraModeList *list = modes; list;
list = mp_camera_mode_list_next(list)) {
MPCameraMode *m = mp_camera_mode_list_get(list);
printf(" %dx%d interval:%d/%d fmt:%s\n",
m->width,
m->height,
m->frame_interval.numerator,
m->frame_interval.denominator,
mp_pixel_format_to_str(m->pixel_format));
// Skip really slow framerates
if (m->frame_interval.denominator < 15) {
printf(" Skipping…\n");
continue;
}
double start_capture = get_time();
mp_camera_set_mode(camera, m);
mp_camera_start_capture(camera);
double last = get_time();
printf(" Testing 10 captures, starting took %fms\n",
(last - start_capture) * 1000);
for (int i = 0; i < 10; ++i) {
MPBuffer buffer;
if (!mp_camera_capture_buffer(camera, &buffer)) {
printf(" Failed to capture buffer\n");
}
size_t num_bytes = mp_pixel_format_width_to_bytes(
m->pixel_format, m->width) *
m->height;
uint8_t *data = malloc(num_bytes);
memcpy(data, buffer.data, num_bytes);
printf(" first byte: %d.", data[0]);
free(data);
mp_camera_release_buffer(camera, buffer.index);
double now = get_time();
printf(" capture took %fms\n", (now - last) * 1000);
last = now;
}
mp_camera_stop_capture(camera);
}
double cleanup_start = get_time();
mp_camera_free(camera);
close(video_fd);
if (subdev_fd != -1)
close(subdev_fd);
mp_device_close(device);
double cleanup_end = get_time();
printf("Cleanup took %fms\n", (cleanup_end - cleanup_start) * 1000);
}
megapixels-1.4.3/tools/list_devices.c 0000664 0000000 0000000 00000005463 14155633267 0017662 0 ustar 00root root 0000000 0000000 #include "device.h"
#include <linux/media.h>
#include <stdio.h>
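// Debug tool: dumps every media device returned by mp_device_list_new(), including its
// entities, interfaces, pads and links.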
int
main(int argc, char *argv[])
{
MPDeviceList *list = mp_device_list_new();
while (list) {
MPDevice *device = mp_device_list_get(list);
const struct media_device_info *info = mp_device_get_info(device);
printf("%s (%s) %s\n", info->model, info->driver, info->serial);
printf(" Bus Info: %s\n", info->bus_info);
printf(" Media Version: %d\n", info->media_version);
printf(" HW Revision: %d\n", info->hw_revision);
printf(" Driver Version: %d\n", info->driver_version);
const struct media_v2_entity *entities =
mp_device_get_entities(device);
size_t num = mp_device_get_num_entities(device);
printf(" Entities (%ld):\n", num);
for (int i = 0; i < num; ++i) {
printf(" %d %s (%d)\n",
entities[i].id,
entities[i].name,
entities[i].function);
}
const struct media_v2_interface *interfaces =
mp_device_get_interfaces(device);
num = mp_device_get_num_interfaces(device);
printf(" Interfaces (%ld):\n", num);
for (int i = 0; i < num; ++i) {
printf(" %d (%d - %d) devnode %d:%d\n",
interfaces[i].id,
interfaces[i].intf_type,
interfaces[i].flags,
interfaces[i].devnode.major,
interfaces[i].devnode.minor);
}
const struct media_v2_pad *pads = mp_device_get_pads(device);
num = mp_device_get_num_pads(device);
printf(" Pads (%ld):\n", num);
for (int i = 0; i < num; ++i) {
printf(" %d for device:%d (%d)\n",
pads[i].id,
pads[i].entity_id,
pads[i].flags);
}
const struct media_v2_link *links = mp_device_get_links(device);
num = mp_device_get_num_links(device);
printf(" Links (%ld):\n", num);
for (int i = 0; i < num; ++i) {
printf(" %d from:%d to:%d (%d)\n",
links[i].id,
links[i].source_id,
links[i].sink_id,
links[i].flags);
}
list = mp_device_list_next(list);
}
mp_device_list_free(list);
}