mirror of https://github.com/docker/buildx.git

bump github.com/zclconf/go-cty from 1.7.1 to 1.10.0

Signed-off-by: CrazyMax <crazy-max@users.noreply.github.com>

This commit is contained in:
parent 785c861233
commit b67bdedb23

go.mod (2 changed lines)
@@ -43,7 +43,7 @@ require (
 	github.com/spf13/pflag v1.0.5
 	github.com/stretchr/testify v1.7.0
 	github.com/theupdateframework/notary v0.6.1 // indirect
-	github.com/zclconf/go-cty v1.7.1
+	github.com/zclconf/go-cty v1.10.0
 	go.opentelemetry.io/otel v1.2.0
 	go.opentelemetry.io/otel/trace v1.2.0
 	golang.org/x/sync v0.0.0-20210220032951-036812b2e83c
go.sum (6 changed lines)
@@ -189,6 +189,8 @@ github.com/apparentlymart/go-textseg v1.0.0 h1:rRmlIsPEEhUTIKQb7T++Nz/A5Q6C9IuX2
 github.com/apparentlymart/go-textseg v1.0.0/go.mod h1:z96Txxhf3xSFMPmb5X/1W05FF/Nj9VFpLOpjS5yuumk=
 github.com/apparentlymart/go-textseg/v12 v12.0.0 h1:bNEQyAGak9tojivJNkoqWErVCQbjdL7GzRt3F8NvfJ0=
 github.com/apparentlymart/go-textseg/v12 v12.0.0/go.mod h1:S/4uRK2UtaQttw1GenVJEynmyUenKwP++x/+DdGV/Ec=
+github.com/apparentlymart/go-textseg/v13 v13.0.0 h1:Y+KvPE1NYz0xl601PVImeQfFyEy6iT90AvPUL1NNfNw=
+github.com/apparentlymart/go-textseg/v13 v13.0.0/go.mod h1:ZK2fH7c4NqDTLtiYLvIkEghdlcqw7yxLeM89kiTRPUo=
 github.com/armon/circbuf v0.0.0-20150827004946-bbbad097214e/go.mod h1:3U/XgcO3hCbHZ8TKRvWD2dDTCfh9M9ya+I9JpbB7O8o=
 github.com/armon/consul-api v0.0.0-20180202201655-eb2c6b5be1b6/go.mod h1:grANhF5doyWs3UAsr3K4I6qtAmlQcZDesFNEHPZAzj8=
 github.com/armon/go-metrics v0.0.0-20180917152333-f0300d1749da/go.mod h1:Q73ZrmVTwzkszR9V5SSuryQ31EELlFMUz1kKyl939pY=
@@ -1337,8 +1339,8 @@ github.com/yvasiyarov/gorelic v0.0.0-20141212073537-a9bba5b9ab50/go.mod h1:NUSPS
 github.com/yvasiyarov/newrelic_platform_go v0.0.0-20140908184405-b21fdbd4370f/go.mod h1:GlGEuHIJweS1mbCqG+7vt2nvWLzLLnRHbXz5JKd/Qbg=
 github.com/zclconf/go-cty v1.2.0/go.mod h1:hOPWgoHbaTUnI5k4D2ld+GRpFJSCe6bCM7m1q/N4PQ8=
 github.com/zclconf/go-cty v1.4.0/go.mod h1:nHzOclRkoj++EU9ZjSrZvRG0BXIWt8c7loYc0qXAFGQ=
-github.com/zclconf/go-cty v1.7.1 h1:AvsC01GMhMLFL8CgEYdHGM+yLnnDOwhPAYcgTkeF0Gw=
-github.com/zclconf/go-cty v1.7.1/go.mod h1:VDR4+I79ubFBGm1uJac1226K5yANQFHeauxPBoP54+o=
+github.com/zclconf/go-cty v1.10.0 h1:mp9ZXQeIcN8kAwuqorjH+Q+njbJKjLrvB2yIh4q7U+0=
+github.com/zclconf/go-cty v1.10.0/go.mod h1:vVKLxnk3puL4qRAv72AO+W99LUD4da90g3uUAzyuvAk=
 go.etcd.io/bbolt v1.3.2/go.mod h1:IbVyRI1SCnLcuJnV2u8VeU0CEYM7e686BmAb1XKL+uU=
 go.etcd.io/bbolt v1.3.3/go.mod h1:IbVyRI1SCnLcuJnV2u8VeU0CEYM7e686BmAb1XKL+uU=
 go.etcd.io/bbolt v1.3.5/go.mod h1:G5EMThwa9y8QZGBClrRx5EY+Yw9kAhnjy3bSjsnlVTQ=
(new file, 95 lines)
@@ -0,0 +1,95 @@
Copyright (c) 2017 Martin Atkins

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

---------

Unicode table generation programs are under a separate copyright and license:

Copyright (c) 2014 Couchbase, Inc.
Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
except in compliance with the License. You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software distributed under the
License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
either express or implied. See the License for the specific language governing permissions
and limitations under the License.

---------

Grapheme break data is provided as part of the Unicode character database,
copright 2016 Unicode, Inc, which is provided with the following license:

Unicode Data Files include all data files under the directories
http://www.unicode.org/Public/, http://www.unicode.org/reports/,
http://www.unicode.org/cldr/data/, http://source.icu-project.org/repos/icu/, and
http://www.unicode.org/utility/trac/browser/.

Unicode Data Files do not include PDF online code charts under the
directory http://www.unicode.org/Public/.

Software includes any source code published in the Unicode Standard
or under the directories
http://www.unicode.org/Public/, http://www.unicode.org/reports/,
http://www.unicode.org/cldr/data/, http://source.icu-project.org/repos/icu/, and
http://www.unicode.org/utility/trac/browser/.

NOTICE TO USER: Carefully read the following legal agreement.
BY DOWNLOADING, INSTALLING, COPYING OR OTHERWISE USING UNICODE INC.'S
DATA FILES ("DATA FILES"), AND/OR SOFTWARE ("SOFTWARE"),
YOU UNEQUIVOCALLY ACCEPT, AND AGREE TO BE BOUND BY, ALL OF THE
TERMS AND CONDITIONS OF THIS AGREEMENT.
IF YOU DO NOT AGREE, DO NOT DOWNLOAD, INSTALL, COPY, DISTRIBUTE OR USE
THE DATA FILES OR SOFTWARE.

COPYRIGHT AND PERMISSION NOTICE

Copyright © 1991-2017 Unicode, Inc. All rights reserved.
Distributed under the Terms of Use in http://www.unicode.org/copyright.html.

Permission is hereby granted, free of charge, to any person obtaining
a copy of the Unicode data files and any associated documentation
(the "Data Files") or Unicode software and any associated documentation
(the "Software") to deal in the Data Files or Software
without restriction, including without limitation the rights to use,
copy, modify, merge, publish, distribute, and/or sell copies of
the Data Files or Software, and to permit persons to whom the Data Files
or Software are furnished to do so, provided that either
(a) this copyright and permission notice appear with all copies
of the Data Files or Software, or
(b) this copyright and permission notice appear in associated
Documentation.

THE DATA FILES AND SOFTWARE ARE PROVIDED "AS IS", WITHOUT WARRANTY OF
ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT OF THIRD PARTY RIGHTS.
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR HOLDERS INCLUDED IN THIS
NOTICE BE LIABLE FOR ANY CLAIM, OR ANY SPECIAL INDIRECT OR CONSEQUENTIAL
DAMAGES, OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE,
DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER
TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
PERFORMANCE OF THE DATA FILES OR SOFTWARE.

Except as contained in this notice, the name of a copyright holder
shall not be used in advertising or otherwise to promote the sale,
use or other dealings in these Data Files or Software without prior
written authorization of the copyright holder.
vendor/github.com/apparentlymart/go-textseg/v13/textseg/all_tokens.go (generated, vendored, new file, 30 lines)
@@ -0,0 +1,30 @@
package textseg

import (
	"bufio"
	"bytes"
)

// AllTokens is a utility that uses a bufio.SplitFunc to produce a slice of
// all of the recognized tokens in the given buffer.
func AllTokens(buf []byte, splitFunc bufio.SplitFunc) ([][]byte, error) {
	scanner := bufio.NewScanner(bytes.NewReader(buf))
	scanner.Split(splitFunc)
	var ret [][]byte
	for scanner.Scan() {
		ret = append(ret, scanner.Bytes())
	}
	return ret, scanner.Err()
}

// TokenCount is a utility that uses a bufio.SplitFunc to count the number of
// recognized tokens in the given buffer.
func TokenCount(buf []byte, splitFunc bufio.SplitFunc) (int, error) {
	scanner := bufio.NewScanner(bytes.NewReader(buf))
	scanner.Split(splitFunc)
	var ret int
	for scanner.Scan() {
		ret++
	}
	return ret, scanner.Err()
}
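Not part of the commit, but for orientation: a minimal sketch of how these two helpers are typically used together with the package's own split functions. The sample input and the expected count are illustrative assumptions, not taken from the diff.

package main

import (
	"fmt"

	"github.com/apparentlymart/go-textseg/v13/textseg"
)

func main() {
	// A flag emoji is two regional-indicator code points but a single
	// user-perceived character (grapheme cluster).
	input := []byte("héllo 🇳🇱")

	clusters, err := textseg.AllTokens(input, textseg.ScanGraphemeClusters)
	if err != nil {
		panic(err)
	}
	for _, c := range clusters {
		fmt.Printf("%q ", string(c))
	}
	fmt.Println()

	n, err := textseg.TokenCount(input, textseg.ScanGraphemeClusters)
	if err != nil {
		panic(err)
	}
	fmt.Println("clusters:", n) // 7 for this input
}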
vendor/github.com/apparentlymart/go-textseg/v13/textseg/emoji_table.rl (generated, vendored, new file, 525 lines)
@@ -0,0 +1,525 @@
# The following Ragel file was autogenerated with unicode2ragel.rb
|
||||
# from: https://www.unicode.org/Public/13.0.0/ucd/emoji/emoji-data.txt
|
||||
#
|
||||
# It defines ["Extended_Pictographic"].
|
||||
#
|
||||
# To use this, make sure that your alphtype is set to byte,
|
||||
# and that your input is in utf8.
|
||||
|
||||
%%{
|
||||
machine Emoji;
|
||||
|
||||
Extended_Pictographic =
|
||||
0xC2 0xA9 #E0.6 [1] (©️) copyright
|
||||
| 0xC2 0xAE #E0.6 [1] (®️) registered
|
||||
| 0xE2 0x80 0xBC #E0.6 [1] (‼️) double exclamation mark
|
||||
| 0xE2 0x81 0x89 #E0.6 [1] (⁉️) exclamation question ...
|
||||
| 0xE2 0x84 0xA2 #E0.6 [1] (™️) trade mark
|
||||
| 0xE2 0x84 0xB9 #E0.6 [1] (ℹ️) information
|
||||
| 0xE2 0x86 0x94..0x99 #E0.6 [6] (↔️..↙️) left-right arrow..do...
|
||||
| 0xE2 0x86 0xA9..0xAA #E0.6 [2] (↩️..↪️) right arrow curving ...
|
||||
| 0xE2 0x8C 0x9A..0x9B #E0.6 [2] (⌚..⌛) watch..hourglass done
|
||||
| 0xE2 0x8C 0xA8 #E1.0 [1] (⌨️) keyboard
|
||||
| 0xE2 0x8E 0x88 #E0.0 [1] (⎈) HELM SYMBOL
|
||||
| 0xE2 0x8F 0x8F #E1.0 [1] (⏏️) eject button
|
||||
| 0xE2 0x8F 0xA9..0xAC #E0.6 [4] (⏩..⏬) fast-forward button..f...
|
||||
| 0xE2 0x8F 0xAD..0xAE #E0.7 [2] (⏭️..⏮️) next track button..l...
|
||||
| 0xE2 0x8F 0xAF #E1.0 [1] (⏯️) play or pause button
|
||||
| 0xE2 0x8F 0xB0 #E0.6 [1] (⏰) alarm clock
|
||||
| 0xE2 0x8F 0xB1..0xB2 #E1.0 [2] (⏱️..⏲️) stopwatch..timer clock
|
||||
| 0xE2 0x8F 0xB3 #E0.6 [1] (⏳) hourglass not done
|
||||
| 0xE2 0x8F 0xB8..0xBA #E0.7 [3] (⏸️..⏺️) pause button..record...
|
||||
| 0xE2 0x93 0x82 #E0.6 [1] (Ⓜ️) circled M
|
||||
| 0xE2 0x96 0xAA..0xAB #E0.6 [2] (▪️..▫️) black small square.....
|
||||
| 0xE2 0x96 0xB6 #E0.6 [1] (▶️) play button
|
||||
| 0xE2 0x97 0x80 #E0.6 [1] (◀️) reverse button
|
||||
| 0xE2 0x97 0xBB..0xBE #E0.6 [4] (◻️..◾) white medium square.....
|
||||
| 0xE2 0x98 0x80..0x81 #E0.6 [2] (☀️..☁️) sun..cloud
|
||||
| 0xE2 0x98 0x82..0x83 #E0.7 [2] (☂️..☃️) umbrella..snowman
|
||||
| 0xE2 0x98 0x84 #E1.0 [1] (☄️) comet
|
||||
| 0xE2 0x98 0x85 #E0.0 [1] (★) BLACK STAR
|
||||
| 0xE2 0x98 0x87..0x8D #E0.0 [7] (☇..☍) LIGHTNING..OPPOSITION
|
||||
| 0xE2 0x98 0x8E #E0.6 [1] (☎️) telephone
|
||||
| 0xE2 0x98 0x8F..0x90 #E0.0 [2] (☏..☐) WHITE TELEPHONE..BALLO...
|
||||
| 0xE2 0x98 0x91 #E0.6 [1] (☑️) check box with check
|
||||
| 0xE2 0x98 0x92 #E0.0 [1] (☒) BALLOT BOX WITH X
|
||||
| 0xE2 0x98 0x94..0x95 #E0.6 [2] (☔..☕) umbrella with rain dro...
|
||||
| 0xE2 0x98 0x96..0x97 #E0.0 [2] (☖..☗) WHITE SHOGI PIECE..BLA...
|
||||
| 0xE2 0x98 0x98 #E1.0 [1] (☘️) shamrock
|
||||
| 0xE2 0x98 0x99..0x9C #E0.0 [4] (☙..☜) REVERSED ROTATED FLORA...
|
||||
| 0xE2 0x98 0x9D #E0.6 [1] (☝️) index pointing up
|
||||
| 0xE2 0x98 0x9E..0x9F #E0.0 [2] (☞..☟) WHITE RIGHT POINTING I...
|
||||
| 0xE2 0x98 0xA0 #E1.0 [1] (☠️) skull and crossbones
|
||||
| 0xE2 0x98 0xA1 #E0.0 [1] (☡) CAUTION SIGN
|
||||
| 0xE2 0x98 0xA2..0xA3 #E1.0 [2] (☢️..☣️) radioactive..biohazard
|
||||
| 0xE2 0x98 0xA4..0xA5 #E0.0 [2] (☤..☥) CADUCEUS..ANKH
|
||||
| 0xE2 0x98 0xA6 #E1.0 [1] (☦️) orthodox cross
|
||||
| 0xE2 0x98 0xA7..0xA9 #E0.0 [3] (☧..☩) CHI RHO..CROSS OF JERU...
|
||||
| 0xE2 0x98 0xAA #E0.7 [1] (☪️) star and crescent
|
||||
| 0xE2 0x98 0xAB..0xAD #E0.0 [3] (☫..☭) FARSI SYMBOL..HAMMER A...
|
||||
| 0xE2 0x98 0xAE #E1.0 [1] (☮️) peace symbol
|
||||
| 0xE2 0x98 0xAF #E0.7 [1] (☯️) yin yang
|
||||
| 0xE2 0x98 0xB0..0xB7 #E0.0 [8] (☰..☷) TRIGRAM FOR HEAVEN..TR...
|
||||
| 0xE2 0x98 0xB8..0xB9 #E0.7 [2] (☸️..☹️) wheel of dharma..fro...
|
||||
| 0xE2 0x98 0xBA #E0.6 [1] (☺️) smiling face
|
||||
| 0xE2 0x98 0xBB..0xBF #E0.0 [5] (☻..☿) BLACK SMILING FACE..ME...
|
||||
| 0xE2 0x99 0x80 #E4.0 [1] (♀️) female sign
|
||||
| 0xE2 0x99 0x81 #E0.0 [1] (♁) EARTH
|
||||
| 0xE2 0x99 0x82 #E4.0 [1] (♂️) male sign
|
||||
| 0xE2 0x99 0x83..0x87 #E0.0 [5] (♃..♇) JUPITER..PLUTO
|
||||
| 0xE2 0x99 0x88..0x93 #E0.6 [12] (♈..♓) Aries..Pisces
|
||||
| 0xE2 0x99 0x94..0x9E #E0.0 [11] (♔..♞) WHITE CHESS KING..BLAC...
|
||||
| 0xE2 0x99 0x9F #E11.0 [1] (♟️) chess pawn
|
||||
| 0xE2 0x99 0xA0 #E0.6 [1] (♠️) spade suit
|
||||
| 0xE2 0x99 0xA1..0xA2 #E0.0 [2] (♡..♢) WHITE HEART SUIT..WHIT...
|
||||
| 0xE2 0x99 0xA3 #E0.6 [1] (♣️) club suit
|
||||
| 0xE2 0x99 0xA4 #E0.0 [1] (♤) WHITE SPADE SUIT
|
||||
| 0xE2 0x99 0xA5..0xA6 #E0.6 [2] (♥️..♦️) heart suit..diamond ...
|
||||
| 0xE2 0x99 0xA7 #E0.0 [1] (♧) WHITE CLUB SUIT
|
||||
| 0xE2 0x99 0xA8 #E0.6 [1] (♨️) hot springs
|
||||
| 0xE2 0x99 0xA9..0xBA #E0.0 [18] (♩..♺) QUARTER NOTE..RECYCLIN...
|
||||
| 0xE2 0x99 0xBB #E0.6 [1] (♻️) recycling symbol
|
||||
| 0xE2 0x99 0xBC..0xBD #E0.0 [2] (♼..♽) RECYCLED PAPER SYMBOL....
|
||||
| 0xE2 0x99 0xBE #E11.0 [1] (♾️) infinity
|
||||
| 0xE2 0x99 0xBF #E0.6 [1] (♿) wheelchair symbol
|
||||
| 0xE2 0x9A 0x80..0x85 #E0.0 [6] (⚀..⚅) DIE FACE-1..DIE FACE-6
|
||||
| 0xE2 0x9A 0x90..0x91 #E0.0 [2] (⚐..⚑) WHITE FLAG..BLACK FLAG
|
||||
| 0xE2 0x9A 0x92 #E1.0 [1] (⚒️) hammer and pick
|
||||
| 0xE2 0x9A 0x93 #E0.6 [1] (⚓) anchor
|
||||
| 0xE2 0x9A 0x94 #E1.0 [1] (⚔️) crossed swords
|
||||
| 0xE2 0x9A 0x95 #E4.0 [1] (⚕️) medical symbol
|
||||
| 0xE2 0x9A 0x96..0x97 #E1.0 [2] (⚖️..⚗️) balance scale..alembic
|
||||
| 0xE2 0x9A 0x98 #E0.0 [1] (⚘) FLOWER
|
||||
| 0xE2 0x9A 0x99 #E1.0 [1] (⚙️) gear
|
||||
| 0xE2 0x9A 0x9A #E0.0 [1] (⚚) STAFF OF HERMES
|
||||
| 0xE2 0x9A 0x9B..0x9C #E1.0 [2] (⚛️..⚜️) atom symbol..fleur-d...
|
||||
| 0xE2 0x9A 0x9D..0x9F #E0.0 [3] (⚝..⚟) OUTLINED WHITE STAR..T...
|
||||
| 0xE2 0x9A 0xA0..0xA1 #E0.6 [2] (⚠️..⚡) warning..high voltage
|
||||
| 0xE2 0x9A 0xA2..0xA6 #E0.0 [5] (⚢..⚦) DOUBLED FEMALE SIGN..M...
|
||||
| 0xE2 0x9A 0xA7 #E13.0 [1] (⚧️) transgender symbol
|
||||
| 0xE2 0x9A 0xA8..0xA9 #E0.0 [2] (⚨..⚩) VERTICAL MALE WITH STR...
|
||||
| 0xE2 0x9A 0xAA..0xAB #E0.6 [2] (⚪..⚫) white circle..black ci...
|
||||
| 0xE2 0x9A 0xAC..0xAF #E0.0 [4] (⚬..⚯) MEDIUM SMALL WHITE CIR...
|
||||
| 0xE2 0x9A 0xB0..0xB1 #E1.0 [2] (⚰️..⚱️) coffin..funeral urn
|
||||
| 0xE2 0x9A 0xB2..0xBC #E0.0 [11] (⚲..⚼) NEUTER..SESQUIQUADRATE
|
||||
| 0xE2 0x9A 0xBD..0xBE #E0.6 [2] (⚽..⚾) soccer ball..baseball
|
||||
| 0xE2 0x9A 0xBF..0xFF #E0.0 [5] (⚿..⛃) SQUARED KEY..BLACK DRA...
|
||||
| 0xE2 0x9B 0x00..0x83 #
|
||||
| 0xE2 0x9B 0x84..0x85 #E0.6 [2] (⛄..⛅) snowman without snow.....
|
||||
| 0xE2 0x9B 0x86..0x87 #E0.0 [2] (⛆..⛇) RAIN..BLACK SNOWMAN
|
||||
| 0xE2 0x9B 0x88 #E0.7 [1] (⛈️) cloud with lightning ...
|
||||
| 0xE2 0x9B 0x89..0x8D #E0.0 [5] (⛉..⛍) TURNED WHITE SHOGI PIE...
|
||||
| 0xE2 0x9B 0x8E #E0.6 [1] (⛎) Ophiuchus
|
||||
| 0xE2 0x9B 0x8F #E0.7 [1] (⛏️) pick
|
||||
| 0xE2 0x9B 0x90 #E0.0 [1] (⛐) CAR SLIDING
|
||||
| 0xE2 0x9B 0x91 #E0.7 [1] (⛑️) rescue worker’s helmet
|
||||
| 0xE2 0x9B 0x92 #E0.0 [1] (⛒) CIRCLED CROSSING LANES
|
||||
| 0xE2 0x9B 0x93 #E0.7 [1] (⛓️) chains
|
||||
| 0xE2 0x9B 0x94 #E0.6 [1] (⛔) no entry
|
||||
| 0xE2 0x9B 0x95..0xA8 #E0.0 [20] (⛕..⛨) ALTERNATE ONE-WAY LEFT...
|
||||
| 0xE2 0x9B 0xA9 #E0.7 [1] (⛩️) shinto shrine
|
||||
| 0xE2 0x9B 0xAA #E0.6 [1] (⛪) church
|
||||
| 0xE2 0x9B 0xAB..0xAF #E0.0 [5] (⛫..⛯) CASTLE..MAP SYMBOL FOR...
|
||||
| 0xE2 0x9B 0xB0..0xB1 #E0.7 [2] (⛰️..⛱️) mountain..umbrella o...
|
||||
| 0xE2 0x9B 0xB2..0xB3 #E0.6 [2] (⛲..⛳) fountain..flag in hole
|
||||
| 0xE2 0x9B 0xB4 #E0.7 [1] (⛴️) ferry
|
||||
| 0xE2 0x9B 0xB5 #E0.6 [1] (⛵) sailboat
|
||||
| 0xE2 0x9B 0xB6 #E0.0 [1] (⛶) SQUARE FOUR CORNERS
|
||||
| 0xE2 0x9B 0xB7..0xB9 #E0.7 [3] (⛷️..⛹️) skier..person bounci...
|
||||
| 0xE2 0x9B 0xBA #E0.6 [1] (⛺) tent
|
||||
| 0xE2 0x9B 0xBB..0xBC #E0.0 [2] (⛻..⛼) JAPANESE BANK SYMBOL.....
|
||||
| 0xE2 0x9B 0xBD #E0.6 [1] (⛽) fuel pump
|
||||
| 0xE2 0x9B 0xBE..0xFF #E0.0 [4] (⛾..✁) CUP ON BLACK SQUARE..U...
|
||||
| 0xE2 0x9C 0x00..0x81 #
|
||||
| 0xE2 0x9C 0x82 #E0.6 [1] (✂️) scissors
|
||||
| 0xE2 0x9C 0x83..0x84 #E0.0 [2] (✃..✄) LOWER BLADE SCISSORS.....
|
||||
| 0xE2 0x9C 0x85 #E0.6 [1] (✅) check mark button
|
||||
| 0xE2 0x9C 0x88..0x8C #E0.6 [5] (✈️..✌️) airplane..victory hand
|
||||
| 0xE2 0x9C 0x8D #E0.7 [1] (✍️) writing hand
|
||||
| 0xE2 0x9C 0x8E #E0.0 [1] (✎) LOWER RIGHT PENCIL
|
||||
| 0xE2 0x9C 0x8F #E0.6 [1] (✏️) pencil
|
||||
| 0xE2 0x9C 0x90..0x91 #E0.0 [2] (✐..✑) UPPER RIGHT PENCIL..WH...
|
||||
| 0xE2 0x9C 0x92 #E0.6 [1] (✒️) black nib
|
||||
| 0xE2 0x9C 0x94 #E0.6 [1] (✔️) check mark
|
||||
| 0xE2 0x9C 0x96 #E0.6 [1] (✖️) multiply
|
||||
| 0xE2 0x9C 0x9D #E0.7 [1] (✝️) latin cross
|
||||
| 0xE2 0x9C 0xA1 #E0.7 [1] (✡️) star of David
|
||||
| 0xE2 0x9C 0xA8 #E0.6 [1] (✨) sparkles
|
||||
| 0xE2 0x9C 0xB3..0xB4 #E0.6 [2] (✳️..✴️) eight-spoked asteris...
|
||||
| 0xE2 0x9D 0x84 #E0.6 [1] (❄️) snowflake
|
||||
| 0xE2 0x9D 0x87 #E0.6 [1] (❇️) sparkle
|
||||
| 0xE2 0x9D 0x8C #E0.6 [1] (❌) cross mark
|
||||
| 0xE2 0x9D 0x8E #E0.6 [1] (❎) cross mark button
|
||||
| 0xE2 0x9D 0x93..0x95 #E0.6 [3] (❓..❕) question mark..white e...
|
||||
| 0xE2 0x9D 0x97 #E0.6 [1] (❗) exclamation mark
|
||||
| 0xE2 0x9D 0xA3 #E1.0 [1] (❣️) heart exclamation
|
||||
| 0xE2 0x9D 0xA4 #E0.6 [1] (❤️) red heart
|
||||
| 0xE2 0x9D 0xA5..0xA7 #E0.0 [3] (❥..❧) ROTATED HEAVY BLACK HE...
|
||||
| 0xE2 0x9E 0x95..0x97 #E0.6 [3] (➕..➗) plus..divide
|
||||
| 0xE2 0x9E 0xA1 #E0.6 [1] (➡️) right arrow
|
||||
| 0xE2 0x9E 0xB0 #E0.6 [1] (➰) curly loop
|
||||
| 0xE2 0x9E 0xBF #E1.0 [1] (➿) double curly loop
|
||||
| 0xE2 0xA4 0xB4..0xB5 #E0.6 [2] (⤴️..⤵️) right arrow curving ...
|
||||
| 0xE2 0xAC 0x85..0x87 #E0.6 [3] (⬅️..⬇️) left arrow..down arrow
|
||||
| 0xE2 0xAC 0x9B..0x9C #E0.6 [2] (⬛..⬜) black large square..wh...
|
||||
| 0xE2 0xAD 0x90 #E0.6 [1] (⭐) star
|
||||
| 0xE2 0xAD 0x95 #E0.6 [1] (⭕) hollow red circle
|
||||
| 0xE3 0x80 0xB0 #E0.6 [1] (〰️) wavy dash
|
||||
| 0xE3 0x80 0xBD #E0.6 [1] (〽️) part alternation mark
|
||||
| 0xE3 0x8A 0x97 #E0.6 [1] (㊗️) Japanese “congratulat...
|
||||
| 0xE3 0x8A 0x99 #E0.6 [1] (㊙️) Japanese “secret” button
|
||||
| 0xF0 0x9F 0x80 0x80..0x83 #E0.0 [4] (🀀..🀃) MAHJONG TILE EAST W...
|
||||
| 0xF0 0x9F 0x80 0x84 #E0.6 [1] (🀄) mahjong red dragon
|
||||
| 0xF0 0x9F 0x80 0x85..0xFF #E0.0 [202] (🀅..🃎) MAHJONG TILE ...
|
||||
| 0xF0 0x9F 0x81..0x82 0x00..0xFF #
|
||||
| 0xF0 0x9F 0x83 0x00..0x8E #
|
||||
| 0xF0 0x9F 0x83 0x8F #E0.6 [1] (🃏) joker
|
||||
| 0xF0 0x9F 0x83 0x90..0xBF #E0.0 [48] (..) <reserved-1F0D0>..<...
|
||||
| 0xF0 0x9F 0x84 0x8D..0x8F #E0.0 [3] (🄍..🄏) CIRCLED ZERO WITH S...
|
||||
| 0xF0 0x9F 0x84 0xAF #E0.0 [1] (🄯) COPYLEFT SYMBOL
|
||||
| 0xF0 0x9F 0x85 0xAC..0xAF #E0.0 [4] (🅬..🅯) RAISED MR SIGN..CIR...
|
||||
| 0xF0 0x9F 0x85 0xB0..0xB1 #E0.6 [2] (🅰️..🅱️) A button (blood t...
|
||||
| 0xF0 0x9F 0x85 0xBE..0xBF #E0.6 [2] (🅾️..🅿️) O button (blood t...
|
||||
| 0xF0 0x9F 0x86 0x8E #E0.6 [1] (🆎) AB button (blood type)
|
||||
| 0xF0 0x9F 0x86 0x91..0x9A #E0.6 [10] (🆑..🆚) CL button..VS button
|
||||
| 0xF0 0x9F 0x86 0xAD..0xFF #E0.0 [57] (🆭..) MASK WORK SYMBOL..<...
|
||||
| 0xF0 0x9F 0x87 0x00..0xA5 #
|
||||
| 0xF0 0x9F 0x88 0x81..0x82 #E0.6 [2] (🈁..🈂️) Japanese “here” bu...
|
||||
| 0xF0 0x9F 0x88 0x83..0x8F #E0.0 [13] (..) <reserved-1F203>..<...
|
||||
| 0xF0 0x9F 0x88 0x9A #E0.6 [1] (🈚) Japanese “free of char...
|
||||
| 0xF0 0x9F 0x88 0xAF #E0.6 [1] (🈯) Japanese “reserved” bu...
|
||||
| 0xF0 0x9F 0x88 0xB2..0xBA #E0.6 [9] (🈲..🈺) Japanese “prohibite...
|
||||
| 0xF0 0x9F 0x88 0xBC..0xBF #E0.0 [4] (..) <reserved-1F23C>..<...
|
||||
| 0xF0 0x9F 0x89 0x89..0x8F #E0.0 [7] (..) <reserved-1F249>..<...
|
||||
| 0xF0 0x9F 0x89 0x90..0x91 #E0.6 [2] (🉐..🉑) Japanese “bargain” ...
|
||||
| 0xF0 0x9F 0x89 0x92..0xFF #E0.0 [174] (..) <reserved-1F2...
|
||||
| 0xF0 0x9F 0x8A..0x8A 0x00..0xFF #
|
||||
| 0xF0 0x9F 0x8B 0x00..0xBF #
|
||||
| 0xF0 0x9F 0x8C 0x80..0x8C #E0.6 [13] (🌀..🌌) cyclone..milky way
|
||||
| 0xF0 0x9F 0x8C 0x8D..0x8E #E0.7 [2] (🌍..🌎) globe showing Europ...
|
||||
| 0xF0 0x9F 0x8C 0x8F #E0.6 [1] (🌏) globe showing Asia-Aus...
|
||||
| 0xF0 0x9F 0x8C 0x90 #E1.0 [1] (🌐) globe with meridians
|
||||
| 0xF0 0x9F 0x8C 0x91 #E0.6 [1] (🌑) new moon
|
||||
| 0xF0 0x9F 0x8C 0x92 #E1.0 [1] (🌒) waxing crescent moon
|
||||
| 0xF0 0x9F 0x8C 0x93..0x95 #E0.6 [3] (🌓..🌕) first quarter moon....
|
||||
| 0xF0 0x9F 0x8C 0x96..0x98 #E1.0 [3] (🌖..🌘) waning gibbous moon...
|
||||
| 0xF0 0x9F 0x8C 0x99 #E0.6 [1] (🌙) crescent moon
|
||||
| 0xF0 0x9F 0x8C 0x9A #E1.0 [1] (🌚) new moon face
|
||||
| 0xF0 0x9F 0x8C 0x9B #E0.6 [1] (🌛) first quarter moon face
|
||||
| 0xF0 0x9F 0x8C 0x9C #E0.7 [1] (🌜) last quarter moon face
|
||||
| 0xF0 0x9F 0x8C 0x9D..0x9E #E1.0 [2] (🌝..🌞) full moon face..sun...
|
||||
| 0xF0 0x9F 0x8C 0x9F..0xA0 #E0.6 [2] (🌟..🌠) glowing star..shoot...
|
||||
| 0xF0 0x9F 0x8C 0xA1 #E0.7 [1] (🌡️) thermometer
|
||||
| 0xF0 0x9F 0x8C 0xA2..0xA3 #E0.0 [2] (🌢..🌣) BLACK DROPLET..WHIT...
|
||||
| 0xF0 0x9F 0x8C 0xA4..0xAC #E0.7 [9] (🌤️..🌬️) sun behind small ...
|
||||
| 0xF0 0x9F 0x8C 0xAD..0xAF #E1.0 [3] (🌭..🌯) hot dog..burrito
|
||||
| 0xF0 0x9F 0x8C 0xB0..0xB1 #E0.6 [2] (🌰..🌱) chestnut..seedling
|
||||
| 0xF0 0x9F 0x8C 0xB2..0xB3 #E1.0 [2] (🌲..🌳) evergreen tree..dec...
|
||||
| 0xF0 0x9F 0x8C 0xB4..0xB5 #E0.6 [2] (🌴..🌵) palm tree..cactus
|
||||
| 0xF0 0x9F 0x8C 0xB6 #E0.7 [1] (🌶️) hot pepper
|
||||
| 0xF0 0x9F 0x8C 0xB7..0xFF #E0.6 [20] (🌷..🍊) tulip..tangerine
|
||||
| 0xF0 0x9F 0x8D 0x00..0x8A #
|
||||
| 0xF0 0x9F 0x8D 0x8B #E1.0 [1] (🍋) lemon
|
||||
| 0xF0 0x9F 0x8D 0x8C..0x8F #E0.6 [4] (🍌..🍏) banana..green apple
|
||||
| 0xF0 0x9F 0x8D 0x90 #E1.0 [1] (🍐) pear
|
||||
| 0xF0 0x9F 0x8D 0x91..0xBB #E0.6 [43] (🍑..🍻) peach..clinking bee...
|
||||
| 0xF0 0x9F 0x8D 0xBC #E1.0 [1] (🍼) baby bottle
|
||||
| 0xF0 0x9F 0x8D 0xBD #E0.7 [1] (🍽️) fork and knife with p...
|
||||
| 0xF0 0x9F 0x8D 0xBE..0xBF #E1.0 [2] (🍾..🍿) bottle with popping...
|
||||
| 0xF0 0x9F 0x8E 0x80..0x93 #E0.6 [20] (🎀..🎓) ribbon..graduation cap
|
||||
| 0xF0 0x9F 0x8E 0x94..0x95 #E0.0 [2] (🎔..🎕) HEART WITH TIP ON T...
|
||||
| 0xF0 0x9F 0x8E 0x96..0x97 #E0.7 [2] (🎖️..🎗️) military medal..r...
|
||||
| 0xF0 0x9F 0x8E 0x98 #E0.0 [1] (🎘) MUSICAL KEYBOARD WITH ...
|
||||
| 0xF0 0x9F 0x8E 0x99..0x9B #E0.7 [3] (🎙️..🎛️) studio microphone...
|
||||
| 0xF0 0x9F 0x8E 0x9C..0x9D #E0.0 [2] (🎜..🎝) BEAMED ASCENDING MU...
|
||||
| 0xF0 0x9F 0x8E 0x9E..0x9F #E0.7 [2] (🎞️..🎟️) film frames..admi...
|
||||
| 0xF0 0x9F 0x8E 0xA0..0xFF #E0.6 [37] (🎠..🏄) carousel horse..per...
|
||||
| 0xF0 0x9F 0x8F 0x00..0x84 #
|
||||
| 0xF0 0x9F 0x8F 0x85 #E1.0 [1] (🏅) sports medal
|
||||
| 0xF0 0x9F 0x8F 0x86 #E0.6 [1] (🏆) trophy
|
||||
| 0xF0 0x9F 0x8F 0x87 #E1.0 [1] (🏇) horse racing
|
||||
| 0xF0 0x9F 0x8F 0x88 #E0.6 [1] (🏈) american football
|
||||
| 0xF0 0x9F 0x8F 0x89 #E1.0 [1] (🏉) rugby football
|
||||
| 0xF0 0x9F 0x8F 0x8A #E0.6 [1] (🏊) person swimming
|
||||
| 0xF0 0x9F 0x8F 0x8B..0x8E #E0.7 [4] (🏋️..🏎️) person lifting we...
|
||||
| 0xF0 0x9F 0x8F 0x8F..0x93 #E1.0 [5] (🏏..🏓) cricket game..ping ...
|
||||
| 0xF0 0x9F 0x8F 0x94..0x9F #E0.7 [12] (🏔️..🏟️) snow-capped mount...
|
||||
| 0xF0 0x9F 0x8F 0xA0..0xA3 #E0.6 [4] (🏠..🏣) house..Japanese pos...
|
||||
| 0xF0 0x9F 0x8F 0xA4 #E1.0 [1] (🏤) post office
|
||||
| 0xF0 0x9F 0x8F 0xA5..0xB0 #E0.6 [12] (🏥..🏰) hospital..castle
|
||||
| 0xF0 0x9F 0x8F 0xB1..0xB2 #E0.0 [2] (🏱..🏲) WHITE PENNANT..BLAC...
|
||||
| 0xF0 0x9F 0x8F 0xB3 #E0.7 [1] (🏳️) white flag
|
||||
| 0xF0 0x9F 0x8F 0xB4 #E1.0 [1] (🏴) black flag
|
||||
| 0xF0 0x9F 0x8F 0xB5 #E0.7 [1] (🏵️) rosette
|
||||
| 0xF0 0x9F 0x8F 0xB6 #E0.0 [1] (🏶) BLACK ROSETTE
|
||||
| 0xF0 0x9F 0x8F 0xB7 #E0.7 [1] (🏷️) label
|
||||
| 0xF0 0x9F 0x8F 0xB8..0xBA #E1.0 [3] (🏸..🏺) badminton..amphora
|
||||
| 0xF0 0x9F 0x90 0x80..0x87 #E1.0 [8] (🐀..🐇) rat..rabbit
|
||||
| 0xF0 0x9F 0x90 0x88 #E0.7 [1] (🐈) cat
|
||||
| 0xF0 0x9F 0x90 0x89..0x8B #E1.0 [3] (🐉..🐋) dragon..whale
|
||||
| 0xF0 0x9F 0x90 0x8C..0x8E #E0.6 [3] (🐌..🐎) snail..horse
|
||||
| 0xF0 0x9F 0x90 0x8F..0x90 #E1.0 [2] (🐏..🐐) ram..goat
|
||||
| 0xF0 0x9F 0x90 0x91..0x92 #E0.6 [2] (🐑..🐒) ewe..monkey
|
||||
| 0xF0 0x9F 0x90 0x93 #E1.0 [1] (🐓) rooster
|
||||
| 0xF0 0x9F 0x90 0x94 #E0.6 [1] (🐔) chicken
|
||||
| 0xF0 0x9F 0x90 0x95 #E0.7 [1] (🐕) dog
|
||||
| 0xF0 0x9F 0x90 0x96 #E1.0 [1] (🐖) pig
|
||||
| 0xF0 0x9F 0x90 0x97..0xA9 #E0.6 [19] (🐗..🐩) boar..poodle
|
||||
| 0xF0 0x9F 0x90 0xAA #E1.0 [1] (🐪) camel
|
||||
| 0xF0 0x9F 0x90 0xAB..0xBE #E0.6 [20] (🐫..🐾) two-hump camel..paw...
|
||||
| 0xF0 0x9F 0x90 0xBF #E0.7 [1] (🐿️) chipmunk
|
||||
| 0xF0 0x9F 0x91 0x80 #E0.6 [1] (👀) eyes
|
||||
| 0xF0 0x9F 0x91 0x81 #E0.7 [1] (👁️) eye
|
||||
| 0xF0 0x9F 0x91 0x82..0xA4 #E0.6 [35] (👂..👤) ear..bust in silhou...
|
||||
| 0xF0 0x9F 0x91 0xA5 #E1.0 [1] (👥) busts in silhouette
|
||||
| 0xF0 0x9F 0x91 0xA6..0xAB #E0.6 [6] (👦..👫) boy..woman and man ...
|
||||
| 0xF0 0x9F 0x91 0xAC..0xAD #E1.0 [2] (👬..👭) men holding hands.....
|
||||
| 0xF0 0x9F 0x91 0xAE..0xFF #E0.6 [63] (👮..💬) police officer..spe...
|
||||
| 0xF0 0x9F 0x92 0x00..0xAC #
|
||||
| 0xF0 0x9F 0x92 0xAD #E1.0 [1] (💭) thought balloon
|
||||
| 0xF0 0x9F 0x92 0xAE..0xB5 #E0.6 [8] (💮..💵) white flower..dolla...
|
||||
| 0xF0 0x9F 0x92 0xB6..0xB7 #E1.0 [2] (💶..💷) euro banknote..poun...
|
||||
| 0xF0 0x9F 0x92 0xB8..0xFF #E0.6 [52] (💸..📫) money with wings..c...
|
||||
| 0xF0 0x9F 0x93 0x00..0xAB #
|
||||
| 0xF0 0x9F 0x93 0xAC..0xAD #E0.7 [2] (📬..📭) open mailbox with r...
|
||||
| 0xF0 0x9F 0x93 0xAE #E0.6 [1] (📮) postbox
|
||||
| 0xF0 0x9F 0x93 0xAF #E1.0 [1] (📯) postal horn
|
||||
| 0xF0 0x9F 0x93 0xB0..0xB4 #E0.6 [5] (📰..📴) newspaper..mobile p...
|
||||
| 0xF0 0x9F 0x93 0xB5 #E1.0 [1] (📵) no mobile phones
|
||||
| 0xF0 0x9F 0x93 0xB6..0xB7 #E0.6 [2] (📶..📷) antenna bars..camera
|
||||
| 0xF0 0x9F 0x93 0xB8 #E1.0 [1] (📸) camera with flash
|
||||
| 0xF0 0x9F 0x93 0xB9..0xBC #E0.6 [4] (📹..📼) video camera..video...
|
||||
| 0xF0 0x9F 0x93 0xBD #E0.7 [1] (📽️) film projector
|
||||
| 0xF0 0x9F 0x93 0xBE #E0.0 [1] (📾) PORTABLE STEREO
|
||||
| 0xF0 0x9F 0x93 0xBF..0xFF #E1.0 [4] (📿..🔂) prayer beads..repea...
|
||||
| 0xF0 0x9F 0x94 0x00..0x82 #
|
||||
| 0xF0 0x9F 0x94 0x83 #E0.6 [1] (🔃) clockwise vertical arrows
|
||||
| 0xF0 0x9F 0x94 0x84..0x87 #E1.0 [4] (🔄..🔇) counterclockwise ar...
|
||||
| 0xF0 0x9F 0x94 0x88 #E0.7 [1] (🔈) speaker low volume
|
||||
| 0xF0 0x9F 0x94 0x89 #E1.0 [1] (🔉) speaker medium volume
|
||||
| 0xF0 0x9F 0x94 0x8A..0x94 #E0.6 [11] (🔊..🔔) speaker high volume...
|
||||
| 0xF0 0x9F 0x94 0x95 #E1.0 [1] (🔕) bell with slash
|
||||
| 0xF0 0x9F 0x94 0x96..0xAB #E0.6 [22] (🔖..🔫) bookmark..pistol
|
||||
| 0xF0 0x9F 0x94 0xAC..0xAD #E1.0 [2] (🔬..🔭) microscope..telescope
|
||||
| 0xF0 0x9F 0x94 0xAE..0xBD #E0.6 [16] (🔮..🔽) crystal ball..downw...
|
||||
| 0xF0 0x9F 0x95 0x86..0x88 #E0.0 [3] (🕆..🕈) WHITE LATIN CROSS.....
|
||||
| 0xF0 0x9F 0x95 0x89..0x8A #E0.7 [2] (🕉️..🕊️) om..dove
|
||||
| 0xF0 0x9F 0x95 0x8B..0x8E #E1.0 [4] (🕋..🕎) kaaba..menorah
|
||||
| 0xF0 0x9F 0x95 0x8F #E0.0 [1] (🕏) BOWL OF HYGIEIA
|
||||
| 0xF0 0x9F 0x95 0x90..0x9B #E0.6 [12] (🕐..🕛) one o’clock..twelve...
|
||||
| 0xF0 0x9F 0x95 0x9C..0xA7 #E0.7 [12] (🕜..🕧) one-thirty..twelve-...
|
||||
| 0xF0 0x9F 0x95 0xA8..0xAE #E0.0 [7] (🕨..🕮) RIGHT SPEAKER..BOOK
|
||||
| 0xF0 0x9F 0x95 0xAF..0xB0 #E0.7 [2] (🕯️..🕰️) candle..mantelpie...
|
||||
| 0xF0 0x9F 0x95 0xB1..0xB2 #E0.0 [2] (🕱..🕲) BLACK SKULL AND CRO...
|
||||
| 0xF0 0x9F 0x95 0xB3..0xB9 #E0.7 [7] (🕳️..🕹️) hole..joystick
|
||||
| 0xF0 0x9F 0x95 0xBA #E3.0 [1] (🕺) man dancing
|
||||
| 0xF0 0x9F 0x95 0xBB..0xFF #E0.0 [12] (🕻..🖆) LEFT HAND TELEPHONE...
|
||||
| 0xF0 0x9F 0x96 0x00..0x86 #
|
||||
| 0xF0 0x9F 0x96 0x87 #E0.7 [1] (🖇️) linked paperclips
|
||||
| 0xF0 0x9F 0x96 0x88..0x89 #E0.0 [2] (🖈..🖉) BLACK PUSHPIN..LOWE...
|
||||
| 0xF0 0x9F 0x96 0x8A..0x8D #E0.7 [4] (🖊️..🖍️) pen..crayon
|
||||
| 0xF0 0x9F 0x96 0x8E..0x8F #E0.0 [2] (🖎..🖏) LEFT WRITING HAND.....
|
||||
| 0xF0 0x9F 0x96 0x90 #E0.7 [1] (🖐️) hand with fingers spl...
|
||||
| 0xF0 0x9F 0x96 0x91..0x94 #E0.0 [4] (🖑..🖔) REVERSED RAISED HAN...
|
||||
| 0xF0 0x9F 0x96 0x95..0x96 #E1.0 [2] (🖕..🖖) middle finger..vulc...
|
||||
| 0xF0 0x9F 0x96 0x97..0xA3 #E0.0 [13] (🖗..🖣) WHITE DOWN POINTING...
|
||||
| 0xF0 0x9F 0x96 0xA4 #E3.0 [1] (🖤) black heart
|
||||
| 0xF0 0x9F 0x96 0xA5 #E0.7 [1] (🖥️) desktop computer
|
||||
| 0xF0 0x9F 0x96 0xA6..0xA7 #E0.0 [2] (🖦..🖧) KEYBOARD AND MOUSE....
|
||||
| 0xF0 0x9F 0x96 0xA8 #E0.7 [1] (🖨️) printer
|
||||
| 0xF0 0x9F 0x96 0xA9..0xB0 #E0.0 [8] (🖩..🖰) POCKET CALCULATOR.....
|
||||
| 0xF0 0x9F 0x96 0xB1..0xB2 #E0.7 [2] (🖱️..🖲️) computer mouse..t...
|
||||
| 0xF0 0x9F 0x96 0xB3..0xBB #E0.0 [9] (🖳..🖻) OLD PERSONAL COMPUT...
|
||||
| 0xF0 0x9F 0x96 0xBC #E0.7 [1] (🖼️) framed picture
|
||||
| 0xF0 0x9F 0x96 0xBD..0xFF #E0.0 [5] (🖽..🗁) FRAME WITH TILES..O...
|
||||
| 0xF0 0x9F 0x97 0x00..0x81 #
|
||||
| 0xF0 0x9F 0x97 0x82..0x84 #E0.7 [3] (🗂️..🗄️) card index divide...
|
||||
| 0xF0 0x9F 0x97 0x85..0x90 #E0.0 [12] (🗅..🗐) EMPTY NOTE..PAGES
|
||||
| 0xF0 0x9F 0x97 0x91..0x93 #E0.7 [3] (🗑️..🗓️) wastebasket..spir...
|
||||
| 0xF0 0x9F 0x97 0x94..0x9B #E0.0 [8] (🗔..🗛) DESKTOP WINDOW..DEC...
|
||||
| 0xF0 0x9F 0x97 0x9C..0x9E #E0.7 [3] (🗜️..🗞️) clamp..rolled-up ...
|
||||
| 0xF0 0x9F 0x97 0x9F..0xA0 #E0.0 [2] (🗟..🗠) PAGE WITH CIRCLED T...
|
||||
| 0xF0 0x9F 0x97 0xA1 #E0.7 [1] (🗡️) dagger
|
||||
| 0xF0 0x9F 0x97 0xA2 #E0.0 [1] (🗢) LIPS
|
||||
| 0xF0 0x9F 0x97 0xA3 #E0.7 [1] (🗣️) speaking head
|
||||
| 0xF0 0x9F 0x97 0xA4..0xA7 #E0.0 [4] (🗤..🗧) THREE RAYS ABOVE..T...
|
||||
| 0xF0 0x9F 0x97 0xA8 #E2.0 [1] (🗨️) left speech bubble
|
||||
| 0xF0 0x9F 0x97 0xA9..0xAE #E0.0 [6] (🗩..🗮) RIGHT SPEECH BUBBLE...
|
||||
| 0xF0 0x9F 0x97 0xAF #E0.7 [1] (🗯️) right anger bubble
|
||||
| 0xF0 0x9F 0x97 0xB0..0xB2 #E0.0 [3] (🗰..🗲) MOOD BUBBLE..LIGHTN...
|
||||
| 0xF0 0x9F 0x97 0xB3 #E0.7 [1] (🗳️) ballot box with ballot
|
||||
| 0xF0 0x9F 0x97 0xB4..0xB9 #E0.0 [6] (🗴..🗹) BALLOT SCRIPT X..BA...
|
||||
| 0xF0 0x9F 0x97 0xBA #E0.7 [1] (🗺️) world map
|
||||
| 0xF0 0x9F 0x97 0xBB..0xBF #E0.6 [5] (🗻..🗿) mount fuji..moai
|
||||
| 0xF0 0x9F 0x98 0x80 #E1.0 [1] (😀) grinning face
|
||||
| 0xF0 0x9F 0x98 0x81..0x86 #E0.6 [6] (😁..😆) beaming face with s...
|
||||
| 0xF0 0x9F 0x98 0x87..0x88 #E1.0 [2] (😇..😈) smiling face with h...
|
||||
| 0xF0 0x9F 0x98 0x89..0x8D #E0.6 [5] (😉..😍) winking face..smili...
|
||||
| 0xF0 0x9F 0x98 0x8E #E1.0 [1] (😎) smiling face with sung...
|
||||
| 0xF0 0x9F 0x98 0x8F #E0.6 [1] (😏) smirking face
|
||||
| 0xF0 0x9F 0x98 0x90 #E0.7 [1] (😐) neutral face
|
||||
| 0xF0 0x9F 0x98 0x91 #E1.0 [1] (😑) expressionless face
|
||||
| 0xF0 0x9F 0x98 0x92..0x94 #E0.6 [3] (😒..😔) unamused face..pens...
|
||||
| 0xF0 0x9F 0x98 0x95 #E1.0 [1] (😕) confused face
|
||||
| 0xF0 0x9F 0x98 0x96 #E0.6 [1] (😖) confounded face
|
||||
| 0xF0 0x9F 0x98 0x97 #E1.0 [1] (😗) kissing face
|
||||
| 0xF0 0x9F 0x98 0x98 #E0.6 [1] (😘) face blowing a kiss
|
||||
| 0xF0 0x9F 0x98 0x99 #E1.0 [1] (😙) kissing face with smil...
|
||||
| 0xF0 0x9F 0x98 0x9A #E0.6 [1] (😚) kissing face with clos...
|
||||
| 0xF0 0x9F 0x98 0x9B #E1.0 [1] (😛) face with tongue
|
||||
| 0xF0 0x9F 0x98 0x9C..0x9E #E0.6 [3] (😜..😞) winking face with t...
|
||||
| 0xF0 0x9F 0x98 0x9F #E1.0 [1] (😟) worried face
|
||||
| 0xF0 0x9F 0x98 0xA0..0xA5 #E0.6 [6] (😠..😥) angry face..sad but...
|
||||
| 0xF0 0x9F 0x98 0xA6..0xA7 #E1.0 [2] (😦..😧) frowning face with ...
|
||||
| 0xF0 0x9F 0x98 0xA8..0xAB #E0.6 [4] (😨..😫) fearful face..tired...
|
||||
| 0xF0 0x9F 0x98 0xAC #E1.0 [1] (😬) grimacing face
|
||||
| 0xF0 0x9F 0x98 0xAD #E0.6 [1] (😭) loudly crying face
|
||||
| 0xF0 0x9F 0x98 0xAE..0xAF #E1.0 [2] (😮..😯) face with open mout...
|
||||
| 0xF0 0x9F 0x98 0xB0..0xB3 #E0.6 [4] (😰..😳) anxious face with s...
|
||||
| 0xF0 0x9F 0x98 0xB4 #E1.0 [1] (😴) sleeping face
|
||||
| 0xF0 0x9F 0x98 0xB5 #E0.6 [1] (😵) dizzy face
|
||||
| 0xF0 0x9F 0x98 0xB6 #E1.0 [1] (😶) face without mouth
|
||||
| 0xF0 0x9F 0x98 0xB7..0xFF #E0.6 [10] (😷..🙀) face with medical m...
|
||||
| 0xF0 0x9F 0x99 0x00..0x80 #
|
||||
| 0xF0 0x9F 0x99 0x81..0x84 #E1.0 [4] (🙁..🙄) slightly frowning f...
|
||||
| 0xF0 0x9F 0x99 0x85..0x8F #E0.6 [11] (🙅..🙏) person gesturing NO...
|
||||
| 0xF0 0x9F 0x9A 0x80 #E0.6 [1] (🚀) rocket
|
||||
| 0xF0 0x9F 0x9A 0x81..0x82 #E1.0 [2] (🚁..🚂) helicopter..locomotive
|
||||
| 0xF0 0x9F 0x9A 0x83..0x85 #E0.6 [3] (🚃..🚅) railway car..bullet...
|
||||
| 0xF0 0x9F 0x9A 0x86 #E1.0 [1] (🚆) train
|
||||
| 0xF0 0x9F 0x9A 0x87 #E0.6 [1] (🚇) metro
|
||||
| 0xF0 0x9F 0x9A 0x88 #E1.0 [1] (🚈) light rail
|
||||
| 0xF0 0x9F 0x9A 0x89 #E0.6 [1] (🚉) station
|
||||
| 0xF0 0x9F 0x9A 0x8A..0x8B #E1.0 [2] (🚊..🚋) tram..tram car
|
||||
| 0xF0 0x9F 0x9A 0x8C #E0.6 [1] (🚌) bus
|
||||
| 0xF0 0x9F 0x9A 0x8D #E0.7 [1] (🚍) oncoming bus
|
||||
| 0xF0 0x9F 0x9A 0x8E #E1.0 [1] (🚎) trolleybus
|
||||
| 0xF0 0x9F 0x9A 0x8F #E0.6 [1] (🚏) bus stop
|
||||
| 0xF0 0x9F 0x9A 0x90 #E1.0 [1] (🚐) minibus
|
||||
| 0xF0 0x9F 0x9A 0x91..0x93 #E0.6 [3] (🚑..🚓) ambulance..police car
|
||||
| 0xF0 0x9F 0x9A 0x94 #E0.7 [1] (🚔) oncoming police car
|
||||
| 0xF0 0x9F 0x9A 0x95 #E0.6 [1] (🚕) taxi
|
||||
| 0xF0 0x9F 0x9A 0x96 #E1.0 [1] (🚖) oncoming taxi
|
||||
| 0xF0 0x9F 0x9A 0x97 #E0.6 [1] (🚗) automobile
|
||||
| 0xF0 0x9F 0x9A 0x98 #E0.7 [1] (🚘) oncoming automobile
|
||||
| 0xF0 0x9F 0x9A 0x99..0x9A #E0.6 [2] (🚙..🚚) sport utility vehic...
|
||||
| 0xF0 0x9F 0x9A 0x9B..0xA1 #E1.0 [7] (🚛..🚡) articulated lorry.....
|
||||
| 0xF0 0x9F 0x9A 0xA2 #E0.6 [1] (🚢) ship
|
||||
| 0xF0 0x9F 0x9A 0xA3 #E1.0 [1] (🚣) person rowing boat
|
||||
| 0xF0 0x9F 0x9A 0xA4..0xA5 #E0.6 [2] (🚤..🚥) speedboat..horizont...
|
||||
| 0xF0 0x9F 0x9A 0xA6 #E1.0 [1] (🚦) vertical traffic light
|
||||
| 0xF0 0x9F 0x9A 0xA7..0xAD #E0.6 [7] (🚧..🚭) construction..no sm...
|
||||
| 0xF0 0x9F 0x9A 0xAE..0xB1 #E1.0 [4] (🚮..🚱) litter in bin sign....
|
||||
| 0xF0 0x9F 0x9A 0xB2 #E0.6 [1] (🚲) bicycle
|
||||
| 0xF0 0x9F 0x9A 0xB3..0xB5 #E1.0 [3] (🚳..🚵) no bicycles..person...
|
||||
| 0xF0 0x9F 0x9A 0xB6 #E0.6 [1] (🚶) person walking
|
||||
| 0xF0 0x9F 0x9A 0xB7..0xB8 #E1.0 [2] (🚷..🚸) no pedestrians..chi...
|
||||
| 0xF0 0x9F 0x9A 0xB9..0xBE #E0.6 [6] (🚹..🚾) men’s room..water c...
|
||||
| 0xF0 0x9F 0x9A 0xBF #E1.0 [1] (🚿) shower
|
||||
| 0xF0 0x9F 0x9B 0x80 #E0.6 [1] (🛀) person taking bath
|
||||
| 0xF0 0x9F 0x9B 0x81..0x85 #E1.0 [5] (🛁..🛅) bathtub..left luggage
|
||||
| 0xF0 0x9F 0x9B 0x86..0x8A #E0.0 [5] (🛆..🛊) TRIANGLE WITH ROUND...
|
||||
| 0xF0 0x9F 0x9B 0x8B #E0.7 [1] (🛋️) couch and lamp
|
||||
| 0xF0 0x9F 0x9B 0x8C #E1.0 [1] (🛌) person in bed
|
||||
| 0xF0 0x9F 0x9B 0x8D..0x8F #E0.7 [3] (🛍️..🛏️) shopping bags..bed
|
||||
| 0xF0 0x9F 0x9B 0x90 #E1.0 [1] (🛐) place of worship
|
||||
| 0xF0 0x9F 0x9B 0x91..0x92 #E3.0 [2] (🛑..🛒) stop sign..shopping...
|
||||
| 0xF0 0x9F 0x9B 0x93..0x94 #E0.0 [2] (🛓..🛔) STUPA..PAGODA
|
||||
| 0xF0 0x9F 0x9B 0x95 #E12.0 [1] (🛕) hindu temple
|
||||
| 0xF0 0x9F 0x9B 0x96..0x97 #E13.0 [2] (🛖..🛗) hut..elevator
|
||||
| 0xF0 0x9F 0x9B 0x98..0x9F #E0.0 [8] (..🛟) <reserved-1F6D8>..<...
|
||||
| 0xF0 0x9F 0x9B 0xA0..0xA5 #E0.7 [6] (🛠️..🛥️) hammer and wrench...
|
||||
| 0xF0 0x9F 0x9B 0xA6..0xA8 #E0.0 [3] (🛦..🛨) UP-POINTING MILITAR...
|
||||
| 0xF0 0x9F 0x9B 0xA9 #E0.7 [1] (🛩️) small airplane
|
||||
| 0xF0 0x9F 0x9B 0xAA #E0.0 [1] (🛪) NORTHEAST-POINTING AIR...
|
||||
| 0xF0 0x9F 0x9B 0xAB..0xAC #E1.0 [2] (🛫..🛬) airplane departure....
|
||||
| 0xF0 0x9F 0x9B 0xAD..0xAF #E0.0 [3] (..) <reserved-1F6ED>..<...
|
||||
| 0xF0 0x9F 0x9B 0xB0 #E0.7 [1] (🛰️) satellite
|
||||
| 0xF0 0x9F 0x9B 0xB1..0xB2 #E0.0 [2] (🛱..🛲) ONCOMING FIRE ENGIN...
|
||||
| 0xF0 0x9F 0x9B 0xB3 #E0.7 [1] (🛳️) passenger ship
|
||||
| 0xF0 0x9F 0x9B 0xB4..0xB6 #E3.0 [3] (🛴..🛶) kick scooter..canoe
|
||||
| 0xF0 0x9F 0x9B 0xB7..0xB8 #E5.0 [2] (🛷..🛸) sled..flying saucer
|
||||
| 0xF0 0x9F 0x9B 0xB9 #E11.0 [1] (🛹) skateboard
|
||||
| 0xF0 0x9F 0x9B 0xBA #E12.0 [1] (🛺) auto rickshaw
|
||||
| 0xF0 0x9F 0x9B 0xBB..0xBC #E13.0 [2] (🛻..🛼) pickup truck..rolle...
|
||||
| 0xF0 0x9F 0x9B 0xBD..0xBF #E0.0 [3] (..) <reserved-1F6FD>..<...
|
||||
| 0xF0 0x9F 0x9D 0xB4..0xBF #E0.0 [12] (🝴..🝿) <reserved-1F774>..<...
|
||||
| 0xF0 0x9F 0x9F 0x95..0x9F #E0.0 [11] (🟕..) CIRCLED TRIANGLE..<...
|
||||
| 0xF0 0x9F 0x9F 0xA0..0xAB #E12.0 [12] (🟠..🟫) orange circle..brow...
|
||||
| 0xF0 0x9F 0x9F 0xAC..0xBF #E0.0 [20] (..) <reserved-1F7EC>..<...
|
||||
| 0xF0 0x9F 0xA0 0x8C..0x8F #E0.0 [4] (..) <reserved-1F80C>..<...
|
||||
| 0xF0 0x9F 0xA1 0x88..0x8F #E0.0 [8] (..) <reserved-1F848>..<...
|
||||
| 0xF0 0x9F 0xA1 0x9A..0x9F #E0.0 [6] (..) <reserved-1F85A>..<...
|
||||
| 0xF0 0x9F 0xA2 0x88..0x8F #E0.0 [8] (..) <reserved-1F888>..<...
|
||||
| 0xF0 0x9F 0xA2 0xAE..0xFF #E0.0 [82] (..) <reserved-1F8AE>..<...
|
||||
| 0xF0 0x9F 0xA3 0x00..0xBF #
|
||||
| 0xF0 0x9F 0xA4 0x8C #E13.0 [1] (🤌) pinched fingers
|
||||
| 0xF0 0x9F 0xA4 0x8D..0x8F #E12.0 [3] (🤍..🤏) white heart..pinchi...
|
||||
| 0xF0 0x9F 0xA4 0x90..0x98 #E1.0 [9] (🤐..🤘) zipper-mouth face.....
|
||||
| 0xF0 0x9F 0xA4 0x99..0x9E #E3.0 [6] (🤙..🤞) call me hand..cross...
|
||||
| 0xF0 0x9F 0xA4 0x9F #E5.0 [1] (🤟) love-you gesture
|
||||
| 0xF0 0x9F 0xA4 0xA0..0xA7 #E3.0 [8] (🤠..🤧) cowboy hat face..sn...
|
||||
| 0xF0 0x9F 0xA4 0xA8..0xAF #E5.0 [8] (🤨..🤯) face with raised ey...
|
||||
| 0xF0 0x9F 0xA4 0xB0 #E3.0 [1] (🤰) pregnant woman
|
||||
| 0xF0 0x9F 0xA4 0xB1..0xB2 #E5.0 [2] (🤱..🤲) breast-feeding..pal...
|
||||
| 0xF0 0x9F 0xA4 0xB3..0xBA #E3.0 [8] (🤳..🤺) selfie..person fencing
|
||||
| 0xF0 0x9F 0xA4 0xBC..0xBE #E3.0 [3] (🤼..🤾) people wrestling..p...
|
||||
| 0xF0 0x9F 0xA4 0xBF #E12.0 [1] (🤿) diving mask
|
||||
| 0xF0 0x9F 0xA5 0x80..0x85 #E3.0 [6] (🥀..🥅) wilted flower..goal...
|
||||
| 0xF0 0x9F 0xA5 0x87..0x8B #E3.0 [5] (🥇..🥋) 1st place medal..ma...
|
||||
| 0xF0 0x9F 0xA5 0x8C #E5.0 [1] (🥌) curling stone
|
||||
| 0xF0 0x9F 0xA5 0x8D..0x8F #E11.0 [3] (🥍..🥏) lacrosse..flying disc
|
||||
| 0xF0 0x9F 0xA5 0x90..0x9E #E3.0 [15] (🥐..🥞) croissant..pancakes
|
||||
| 0xF0 0x9F 0xA5 0x9F..0xAB #E5.0 [13] (🥟..🥫) dumpling..canned food
|
||||
| 0xF0 0x9F 0xA5 0xAC..0xB0 #E11.0 [5] (🥬..🥰) leafy green..smilin...
|
||||
| 0xF0 0x9F 0xA5 0xB1 #E12.0 [1] (🥱) yawning face
|
||||
| 0xF0 0x9F 0xA5 0xB2 #E13.0 [1] (🥲) smiling face with tear
|
||||
| 0xF0 0x9F 0xA5 0xB3..0xB6 #E11.0 [4] (🥳..🥶) partying face..cold...
|
||||
| 0xF0 0x9F 0xA5 0xB7..0xB8 #E13.0 [2] (🥷..🥸) ninja..disguised face
|
||||
| 0xF0 0x9F 0xA5 0xB9 #E0.0 [1] (🥹) <reserved-1F979>
|
||||
| 0xF0 0x9F 0xA5 0xBA #E11.0 [1] (🥺) pleading face
|
||||
| 0xF0 0x9F 0xA5 0xBB #E12.0 [1] (🥻) sari
|
||||
| 0xF0 0x9F 0xA5 0xBC..0xBF #E11.0 [4] (🥼..🥿) lab coat..flat shoe
|
||||
| 0xF0 0x9F 0xA6 0x80..0x84 #E1.0 [5] (🦀..🦄) crab..unicorn
|
||||
| 0xF0 0x9F 0xA6 0x85..0x91 #E3.0 [13] (🦅..🦑) eagle..squid
|
||||
| 0xF0 0x9F 0xA6 0x92..0x97 #E5.0 [6] (🦒..🦗) giraffe..cricket
|
||||
| 0xF0 0x9F 0xA6 0x98..0xA2 #E11.0 [11] (🦘..🦢) kangaroo..swan
|
||||
| 0xF0 0x9F 0xA6 0xA3..0xA4 #E13.0 [2] (🦣..🦤) mammoth..dodo
|
||||
| 0xF0 0x9F 0xA6 0xA5..0xAA #E12.0 [6] (🦥..🦪) sloth..oyster
|
||||
| 0xF0 0x9F 0xA6 0xAB..0xAD #E13.0 [3] (🦫..🦭) beaver..seal
|
||||
| 0xF0 0x9F 0xA6 0xAE..0xAF #E12.0 [2] (🦮..🦯) guide dog..white cane
|
||||
| 0xF0 0x9F 0xA6 0xB0..0xB9 #E11.0 [10] (🦰..🦹) red hair..supervillain
|
||||
| 0xF0 0x9F 0xA6 0xBA..0xBF #E12.0 [6] (🦺..🦿) safety vest..mechan...
|
||||
| 0xF0 0x9F 0xA7 0x80 #E1.0 [1] (🧀) cheese wedge
|
||||
| 0xF0 0x9F 0xA7 0x81..0x82 #E11.0 [2] (🧁..🧂) cupcake..salt
|
||||
| 0xF0 0x9F 0xA7 0x83..0x8A #E12.0 [8] (🧃..🧊) beverage box..ice
|
||||
| 0xF0 0x9F 0xA7 0x8B #E13.0 [1] (🧋) bubble tea
|
||||
| 0xF0 0x9F 0xA7 0x8C #E0.0 [1] (🧌) <reserved-1F9CC>
|
||||
| 0xF0 0x9F 0xA7 0x8D..0x8F #E12.0 [3] (🧍..🧏) person standing..de...
|
||||
| 0xF0 0x9F 0xA7 0x90..0xA6 #E5.0 [23] (🧐..🧦) face with monocle.....
|
||||
| 0xF0 0x9F 0xA7 0xA7..0xBF #E11.0 [25] (🧧..🧿) red envelope..nazar...
|
||||
| 0xF0 0x9F 0xA8 0x80..0xFF #E0.0 [112] (🨀..) NEUTRAL CHESS KING....
|
||||
| 0xF0 0x9F 0xA9 0x00..0xAF #
|
||||
| 0xF0 0x9F 0xA9 0xB0..0xB3 #E12.0 [4] (🩰..🩳) ballet shoes..shorts
|
||||
| 0xF0 0x9F 0xA9 0xB4 #E13.0 [1] (🩴) thong sandal
|
||||
| 0xF0 0x9F 0xA9 0xB5..0xB7 #E0.0 [3] (🩵..🩷) <reserved-1FA75>..<...
|
||||
| 0xF0 0x9F 0xA9 0xB8..0xBA #E12.0 [3] (🩸..🩺) drop of blood..stet...
|
||||
| 0xF0 0x9F 0xA9 0xBB..0xBF #E0.0 [5] (🩻..) <reserved-1FA7B>..<...
|
||||
| 0xF0 0x9F 0xAA 0x80..0x82 #E12.0 [3] (🪀..🪂) yo-yo..parachute
|
||||
| 0xF0 0x9F 0xAA 0x83..0x86 #E13.0 [4] (🪃..🪆) boomerang..nesting ...
|
||||
| 0xF0 0x9F 0xAA 0x87..0x8F #E0.0 [9] (🪇..) <reserved-1FA87>..<...
|
||||
| 0xF0 0x9F 0xAA 0x90..0x95 #E12.0 [6] (🪐..🪕) ringed planet..banjo
|
||||
| 0xF0 0x9F 0xAA 0x96..0xA8 #E13.0 [19] (🪖..🪨) military helmet..rock
|
||||
| 0xF0 0x9F 0xAA 0xA9..0xAF #E0.0 [7] (🪩..🪯) <reserved-1FAA9>..<...
|
||||
| 0xF0 0x9F 0xAA 0xB0..0xB6 #E13.0 [7] (🪰..🪶) fly..feather
|
||||
| 0xF0 0x9F 0xAA 0xB7..0xBF #E0.0 [9] (🪷..🪿) <reserved-1FAB7>..<...
|
||||
| 0xF0 0x9F 0xAB 0x80..0x82 #E13.0 [3] (🫀..🫂) anatomical heart..p...
|
||||
| 0xF0 0x9F 0xAB 0x83..0x8F #E0.0 [13] (🫃..🫏) <reserved-1FAC3>..<...
|
||||
| 0xF0 0x9F 0xAB 0x90..0x96 #E13.0 [7] (🫐..🫖) blueberries..teapot
|
||||
| 0xF0 0x9F 0xAB 0x97..0xBF #E0.0 [41] (🫗..) <reserved-1FAD7>..<...
|
||||
| 0xF0 0x9F 0xB0 0x80..0xFF #E0.0[1022] (..) <reserved-1FC...
|
||||
| 0xF0 0x9F 0xB1..0xBE 0x00..0xFF #
|
||||
| 0xF0 0x9F 0xBF 0x00..0xBD #
|
||||
;
|
||||
|
||||
}%%
|
vendor/github.com/apparentlymart/go-textseg/v13/textseg/generate.go (generated, vendored, new file, 8 lines)
@@ -0,0 +1,8 @@
package textseg

//go:generate go run make_tables.go -output tables.go
//go:generate go run make_test_tables.go -output tables_test.go
//go:generate ruby unicode2ragel.rb --url=https://www.unicode.org/Public/13.0.0/ucd/auxiliary/GraphemeBreakProperty.txt -m GraphemeCluster -p "Prepend,CR,LF,Control,Extend,Regional_Indicator,SpacingMark,L,V,T,LV,LVT,ZWJ" -o grapheme_clusters_table.rl
//go:generate ruby unicode2ragel.rb --url=https://www.unicode.org/Public/13.0.0/ucd/emoji/emoji-data.txt -m Emoji -p "Extended_Pictographic" -o emoji_table.rl
//go:generate ragel -Z grapheme_clusters.rl
//go:generate gofmt -w grapheme_clusters.go
vendor/github.com/apparentlymart/go-textseg/v13/textseg/grapheme_clusters.go (generated, vendored, new file, 4138 lines)
File diff suppressed because it is too large.
vendor/github.com/apparentlymart/go-textseg/v13/textseg/grapheme_clusters.rl (generated, vendored, new file, 133 lines)
@@ -0,0 +1,133 @@
package textseg

import (
	"errors"
	"unicode/utf8"
)

// Generated from grapheme_clusters.rl. DO NOT EDIT
%%{
  # (except you are actually in grapheme_clusters.rl here, so edit away!)

  machine graphclust;
  write data;
}%%

var Error = errors.New("invalid UTF8 text")

// ScanGraphemeClusters is a split function for bufio.Scanner that splits
// on grapheme cluster boundaries.
func ScanGraphemeClusters(data []byte, atEOF bool) (int, []byte, error) {
	if len(data) == 0 {
		return 0, nil, nil
	}

	// Ragel state
	cs := 0 // Current State
	p := 0  // "Pointer" into data
	pe := len(data) // End-of-data "pointer"
	ts := 0
	te := 0
	act := 0
	eof := pe

	// Make Go compiler happy
	_ = ts
	_ = te
	_ = act
	_ = eof

	startPos := 0
	endPos := 0

	%%{
		include GraphemeCluster "grapheme_clusters_table.rl";
		include Emoji "emoji_table.rl";

		action start {
			startPos = p
		}

		action end {
			endPos = p
		}

		action emit {
			return endPos+1, data[startPos:endPos+1], nil
		}

		ZWJGlue = ZWJ (Extended_Pictographic Extend*)?;
		AnyExtender = Extend | ZWJGlue | SpacingMark;
		Extension = AnyExtender*;
		ReplacementChar = (0xEF 0xBF 0xBD);

		CRLFSeq = CR LF;
		ControlSeq = Control | ReplacementChar;
		HangulSeq = (
			L+ (((LV? V+ | LVT) T*)?|LV?) |
			LV V* T* |
			V+ T* |
			LVT T* |
			T+
		) Extension;
		EmojiSeq = Extended_Pictographic Extend* Extension;
		ZWJSeq = ZWJ (ZWJ | Extend | SpacingMark)*;
		EmojiFlagSeq = Regional_Indicator Regional_Indicator? Extension;

		UTF8Cont = 0x80 .. 0xBF;
		AnyUTF8 = (
			0x00..0x7F |
			0xC0..0xDF . UTF8Cont |
			0xE0..0xEF . UTF8Cont . UTF8Cont |
			0xF0..0xF7 . UTF8Cont . UTF8Cont . UTF8Cont
		);

		# OtherSeq is any character that isn't at the start of one of the extended sequences above, followed by extension
		OtherSeq = (AnyUTF8 - (CR|LF|Control|ReplacementChar|L|LV|V|LVT|T|Extended_Pictographic|ZWJ|Regional_Indicator|Prepend)) (Extend | ZWJ | SpacingMark)*;

		# PrependSeq is prepend followed by any of the other patterns above, except control characters which explicitly break
		PrependSeq = Prepend+ (HangulSeq|EmojiSeq|ZWJSeq|EmojiFlagSeq|OtherSeq)?;

		CRLFTok = CRLFSeq >start @end;
		ControlTok = ControlSeq >start @end;
		HangulTok = HangulSeq >start @end;
		EmojiTok = EmojiSeq >start @end;
		ZWJTok = ZWJSeq >start @end;
		EmojiFlagTok = EmojiFlagSeq >start @end;
		OtherTok = OtherSeq >start @end;
		PrependTok = PrependSeq >start @end;

		main := |*
			CRLFTok => emit;
			ControlTok => emit;
			HangulTok => emit;
			EmojiTok => emit;
			ZWJTok => emit;
			EmojiFlagTok => emit;
			PrependTok => emit;
			OtherTok => emit;

			# any single valid UTF-8 character would also be valid per spec,
			# but we'll handle that separately after the loop so we can deal
			# with requesting more bytes if we're not at EOF.
		*|;

		write init;
		write exec;
	}%%

	// If we fall out here then we were unable to complete a sequence.
	// If we weren't able to complete a sequence then either we've
	// reached the end of a partial buffer (so there's more data to come)
	// or we have an isolated symbol that would normally be part of a
	// grapheme cluster but has appeared in isolation here.

	if !atEOF {
		// Request more
		return 0, nil, nil
	}

	// Just take the first UTF-8 sequence and return that.
	_, seqLen := utf8.DecodeRune(data)
	return seqLen, data[:seqLen], nil
}
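Not part of the commit: a small usage sketch of the bufio.SplitFunc contract the comments above describe (ask for more input on a partial sequence, fall back to a single UTF-8 sequence only at EOF). The input string is an illustrative assumption; the exact cluster boundaries come from the vendored Unicode 13.0.0 tables.

package main

import (
	"bufio"
	"fmt"
	"strings"

	"github.com/apparentlymart/go-textseg/v13/textseg"
)

func main() {
	// "👩‍🚀" is woman + ZWJ + rocket: three code points that the ZWJ glue
	// rule above keeps together as one grapheme cluster.
	sc := bufio.NewScanner(strings.NewReader("👩‍🚀 liftoff"))
	sc.Split(textseg.ScanGraphemeClusters)
	for sc.Scan() {
		fmt.Printf("%q ", sc.Text())
	}
	fmt.Println()
	if err := sc.Err(); err != nil {
		fmt.Println("scan error:", err)
	}
}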
vendor/github.com/apparentlymart/go-textseg/v13/textseg/grapheme_clusters_table.rl (generated, vendored, new file, 1609 lines)
File diff suppressed because it is too large.

vendor/github.com/apparentlymart/go-textseg/v13/textseg/tables.go (generated, vendored, new file, 5833 lines)
File diff suppressed because it is too large.
vendor/github.com/apparentlymart/go-textseg/v13/textseg/unicode2ragel.rb (generated, vendored, new file, 335 lines)
@@ -0,0 +1,335 @@
#!/usr/bin/env ruby
|
||||
#
|
||||
# This scripted has been updated to accept more command-line arguments:
|
||||
#
|
||||
# -u, --url URL to process
|
||||
# -m, --machine Machine name
|
||||
# -p, --properties Properties to add to the machine
|
||||
# -o, --output Write output to file
|
||||
#
|
||||
# Updated by: Marty Schoch <marty.schoch@gmail.com>
|
||||
#
|
||||
# This script uses the unicode spec to generate a Ragel state machine
|
||||
# that recognizes unicode alphanumeric characters. It generates 5
|
||||
# character classes: uupper, ulower, ualpha, udigit, and ualnum.
|
||||
# Currently supported encodings are UTF-8 [default] and UCS-4.
|
||||
#
|
||||
# Usage: unicode2ragel.rb [options]
|
||||
# -e, --encoding [ucs4 | utf8] Data encoding
|
||||
# -h, --help Show this message
|
||||
#
|
||||
# This script was originally written as part of the Ferret search
|
||||
# engine library.
|
||||
#
|
||||
# Author: Rakan El-Khalil <rakan@well.com>
|
||||
|
||||
require 'optparse'
|
||||
require 'open-uri'
|
||||
|
||||
ENCODINGS = [ :utf8, :ucs4 ]
|
||||
ALPHTYPES = { :utf8 => "byte", :ucs4 => "rune" }
|
||||
DEFAULT_CHART_URL = "http://www.unicode.org/Public/5.1.0/ucd/DerivedCoreProperties.txt"
|
||||
DEFAULT_MACHINE_NAME= "WChar"
|
||||
|
||||
###
|
||||
# Display vars & default option
|
||||
|
||||
TOTAL_WIDTH = 80
|
||||
RANGE_WIDTH = 23
|
||||
@encoding = :utf8
|
||||
@chart_url = DEFAULT_CHART_URL
|
||||
machine_name = DEFAULT_MACHINE_NAME
|
||||
properties = []
|
||||
@output = $stdout
|
||||
|
||||
###
|
||||
# Option parsing
|
||||
|
||||
cli_opts = OptionParser.new do |opts|
|
||||
opts.on("-e", "--encoding [ucs4 | utf8]", "Data encoding") do |o|
|
||||
@encoding = o.downcase.to_sym
|
||||
end
|
||||
opts.on("-h", "--help", "Show this message") do
|
||||
puts opts
|
||||
exit
|
||||
end
|
||||
opts.on("-u", "--url URL", "URL to process") do |o|
|
||||
@chart_url = o
|
||||
end
|
||||
opts.on("-m", "--machine MACHINE_NAME", "Machine name") do |o|
|
||||
machine_name = o
|
||||
end
|
||||
opts.on("-p", "--properties x,y,z", Array, "Properties to add to machine") do |o|
|
||||
properties = o
|
||||
end
|
||||
opts.on("-o", "--output FILE", "output file") do |o|
|
||||
@output = File.new(o, "w+")
|
||||
end
|
||||
end
|
||||
|
||||
cli_opts.parse(ARGV)
|
||||
unless ENCODINGS.member? @encoding
|
||||
puts "Invalid encoding: #{@encoding}"
|
||||
puts cli_opts
|
||||
exit
|
||||
end
|
||||
|
||||
##
|
||||
# Downloads the document at url and yields every alpha line's hex
|
||||
# range and description.
|
||||
|
||||
def each_alpha( url, property )
|
||||
URI.open( url ) do |file|
|
||||
file.each_line do |line|
|
||||
next if line =~ /^#/;
|
||||
next if line !~ /; #{property} *#/;
|
||||
|
||||
range, description = line.split(/;/)
|
||||
range.strip!
|
||||
description.gsub!(/.*#/, '').strip!
|
||||
|
||||
if range =~ /\.\./
|
||||
start, stop = range.split '..'
|
||||
else start = stop = range
|
||||
end
|
||||
|
||||
yield start.hex .. stop.hex, description
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
###
|
||||
# Formats to hex at minimum width
|
||||
|
||||
def to_hex( n )
|
||||
r = "%0X" % n
|
||||
r = "0#{r}" unless (r.length % 2).zero?
|
||||
r
|
||||
end
|
||||
|
||||
###
|
||||
# UCS4 is just a straight hex conversion of the unicode codepoint.
|
||||
|
||||
def to_ucs4( range )
|
||||
rangestr = "0x" + to_hex(range.begin)
|
||||
rangestr << "..0x" + to_hex(range.end) if range.begin != range.end
|
||||
[ rangestr ]
|
||||
end
|
||||
|
||||
##
|
||||
# 0x00 - 0x7f -> 0zzzzzzz[7]
|
||||
# 0x80 - 0x7ff -> 110yyyyy[5] 10zzzzzz[6]
|
||||
# 0x800 - 0xffff -> 1110xxxx[4] 10yyyyyy[6] 10zzzzzz[6]
|
||||
# 0x010000 - 0x10ffff -> 11110www[3] 10xxxxxx[6] 10yyyyyy[6] 10zzzzzz[6]
|
||||
|
||||
UTF8_BOUNDARIES = [0x7f, 0x7ff, 0xffff, 0x10ffff]
|
||||
|
||||
def to_utf8_enc( n )
|
||||
r = 0
|
||||
if n <= 0x7f
|
||||
r = n
|
||||
elsif n <= 0x7ff
|
||||
y = 0xc0 | (n >> 6)
|
||||
z = 0x80 | (n & 0x3f)
|
||||
r = y << 8 | z
|
||||
elsif n <= 0xffff
|
||||
x = 0xe0 | (n >> 12)
|
||||
y = 0x80 | (n >> 6) & 0x3f
|
||||
z = 0x80 | n & 0x3f
|
||||
r = x << 16 | y << 8 | z
|
||||
elsif n <= 0x10ffff
|
||||
w = 0xf0 | (n >> 18)
|
||||
x = 0x80 | (n >> 12) & 0x3f
|
||||
y = 0x80 | (n >> 6) & 0x3f
|
||||
z = 0x80 | n & 0x3f
|
||||
r = w << 24 | x << 16 | y << 8 | z
|
||||
end
|
||||
|
||||
to_hex(r)
|
||||
end
|
||||
|
||||
def from_utf8_enc( n )
|
||||
n = n.hex
|
||||
r = 0
|
||||
if n <= 0x7f
|
||||
r = n
|
||||
elsif n <= 0xdfff
|
||||
y = (n >> 8) & 0x1f
|
||||
z = n & 0x3f
|
||||
r = y << 6 | z
|
||||
elsif n <= 0xefffff
|
||||
x = (n >> 16) & 0x0f
|
||||
y = (n >> 8) & 0x3f
|
||||
z = n & 0x3f
|
||||
r = x << 10 | y << 6 | z
|
||||
elsif n <= 0xf7ffffff
|
||||
w = (n >> 24) & 0x07
|
||||
x = (n >> 16) & 0x3f
|
||||
y = (n >> 8) & 0x3f
|
||||
z = n & 0x3f
|
||||
r = w << 18 | x << 12 | y << 6 | z
|
||||
end
|
||||
r
|
||||
end
|
||||
|
||||
###
|
||||
# Given a range, splits it up into ranges that can be continuously
|
||||
# encoded into utf8. Eg: 0x00 .. 0xff => [0x00..0x7f, 0x80..0xff]
|
||||
# This is not strictly needed since the current [5.1] unicode standard
|
||||
# doesn't have ranges that straddle utf8 boundaries. This is included
|
||||
# for completeness as there is no telling if that will ever change.
|
||||
|
||||
def utf8_ranges( range )
|
||||
ranges = []
|
||||
UTF8_BOUNDARIES.each do |max|
|
||||
if range.begin <= max
|
||||
if range.end <= max
|
||||
ranges << range
|
||||
return ranges
|
||||
end
|
||||
|
||||
ranges << (range.begin .. max)
|
||||
range = (max + 1) .. range.end
|
||||
end
|
||||
end
|
||||
ranges
|
||||
end
|
||||
|
||||
def build_range( start, stop )
|
||||
size = start.size/2
|
||||
left = size - 1
|
||||
return [""] if size < 1
|
||||
|
||||
a = start[0..1]
|
||||
b = stop[0..1]
|
||||
|
||||
###
|
||||
# Shared prefix
|
||||
|
||||
if a == b
|
||||
return build_range(start[2..-1], stop[2..-1]).map do |elt|
|
||||
"0x#{a} " + elt
|
||||
end
|
||||
end
|
||||
|
||||
###
|
||||
# Unshared prefix, end of run
|
||||
|
||||
return ["0x#{a}..0x#{b} "] if left.zero?
|
||||
|
||||
###
|
||||
# Unshared prefix, not end of run
|
||||
# Range can be 0x123456..0x56789A
|
||||
# Which is equivalent to:
|
||||
# 0x123456 .. 0x12FFFF
|
||||
# 0x130000 .. 0x55FFFF
|
||||
# 0x560000 .. 0x56789A
|
||||
|
||||
ret = []
|
||||
ret << build_range(start, a + "FF" * left)
|
||||
|
||||
###
|
||||
# Only generate middle range if need be.
|
||||
|
||||
if a.hex+1 != b.hex
|
||||
max = to_hex(b.hex - 1)
|
||||
max = "FF" if b == "FF"
|
||||
ret << "0x#{to_hex(a.hex+1)}..0x#{max} " + "0x00..0xFF " * left
|
||||
end
|
||||
|
||||
###
|
||||
# Don't generate last range if it is covered by first range
|
||||
|
||||
ret << build_range(b + "00" * left, stop) unless b == "FF"
|
||||
ret.flatten!
|
||||
end
|
||||
|
||||
def to_utf8( range )
|
||||
utf8_ranges( range ).map do |r|
|
||||
begin_enc = to_utf8_enc(r.begin)
|
||||
end_enc = to_utf8_enc(r.end)
|
||||
build_range begin_enc, end_enc
|
||||
end.flatten!
|
||||
end
|
||||
|
||||
##
|
||||
# Perform a 3-way comparison of the number of codepoints advertised by
|
||||
# the unicode spec for the given range, the originally parsed range,
|
||||
# and the resulting utf8 encoded range.
|
||||
|
||||
def count_codepoints( code )
|
||||
code.split(' ').inject(1) do |acc, elt|
|
||||
if elt =~ /0x(.+)\.\.0x(.+)/
|
||||
if @encoding == :utf8
|
||||
acc * (from_utf8_enc($2) - from_utf8_enc($1) + 1)
|
||||
else
|
||||
acc * ($2.hex - $1.hex + 1)
|
||||
end
|
||||
else
|
||||
acc
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
def is_valid?( range, desc, codes )
|
||||
spec_count = 1
|
||||
spec_count = $1.to_i if desc =~ /\[(\d+)\]/
|
||||
range_count = range.end - range.begin + 1
|
||||
|
||||
sum = codes.inject(0) { |acc, elt| acc + count_codepoints(elt) }
|
||||
sum == spec_count and sum == range_count
|
||||
end
|
||||
|
||||
##
|
||||
# Generate the state maching to stdout
|
||||
|
||||
def generate_machine( name, property )
|
||||
pipe = " "
|
||||
@output.puts " #{name} = "
|
||||
each_alpha( @chart_url, property ) do |range, desc|
|
||||
|
||||
codes = (@encoding == :ucs4) ? to_ucs4(range) : to_utf8(range)
|
||||
|
||||
#raise "Invalid encoding of range #{range}: #{codes.inspect}" unless
|
||||
# is_valid? range, desc, codes
|
||||
|
||||
range_width = codes.map { |a| a.size }.max
|
||||
range_width = RANGE_WIDTH if range_width < RANGE_WIDTH
|
||||
|
||||
desc_width = TOTAL_WIDTH - RANGE_WIDTH - 11
|
||||
desc_width -= (range_width - RANGE_WIDTH) if range_width > RANGE_WIDTH
|
||||
|
||||
if desc.size > desc_width
|
||||
desc = desc[0..desc_width - 4] + "..."
|
||||
end
|
||||
|
||||
codes.each_with_index do |r, idx|
|
||||
desc = "" unless idx.zero?
|
||||
code = "%-#{range_width}s" % r
|
||||
@output.puts " #{pipe} #{code} ##{desc}"
|
||||
pipe = "|"
|
||||
end
|
||||
end
|
||||
@output.puts " ;"
|
||||
@output.puts ""
|
||||
end
|
||||
|
||||
@output.puts <<EOF
|
||||
# The following Ragel file was autogenerated with #{$0}
|
||||
# from: #{@chart_url}
|
||||
#
|
||||
# It defines #{properties}.
|
||||
#
|
||||
# To use this, make sure that your alphtype is set to #{ALPHTYPES[@encoding]},
|
||||
# and that your input is in #{@encoding}.
|
||||
|
||||
%%{
|
||||
machine #{machine_name};
|
||||
|
||||
EOF
|
||||
|
||||
properties.each { |x| generate_machine( x, x ) }
|
||||
|
||||
@output.puts <<EOF
|
||||
}%%
|
||||
EOF
|
19 vendor/github.com/apparentlymart/go-textseg/v13/textseg/utf8_seqs.go generated vendored Normal file
|
@ -0,0 +1,19 @@
|
|||
package textseg
|
||||
|
||||
import "unicode/utf8"
|
||||
|
||||
// ScanUTF8Sequences is a split function for bufio.Scanner that splits
|
||||
// on UTF8 sequence boundaries.
|
||||
//
|
||||
// This is included largely for completeness, since this behavior is already
|
||||
// built in to Go when ranging over a string.
|
||||
func ScanUTF8Sequences(data []byte, atEOF bool) (int, []byte, error) {
|
||||
if len(data) == 0 {
|
||||
return 0, nil, nil
|
||||
}
|
||||
r, seqLen := utf8.DecodeRune(data)
|
||||
if r == utf8.RuneError && !atEOF {
|
||||
return 0, nil, nil
|
||||
}
|
||||
return seqLen, data[:seqLen], nil
|
||||
}
|
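For orientation: a split function with this signature plugs directly into bufio.Scanner. A minimal sketch, assuming only the vendored import path and the ScanUTF8Sequences function defined above:

package main

import (
	"bufio"
	"fmt"
	"strings"

	"github.com/apparentlymart/go-textseg/v13/textseg"
)

func main() {
	// Emit one token per UTF-8 sequence using the split function above.
	sc := bufio.NewScanner(strings.NewReader("héllo"))
	sc.Split(textseg.ScanUTF8Sequences)
	for sc.Scan() {
		fmt.Printf("%q\n", sc.Text())
	}
}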
|
@ -33,14 +33,25 @@ func getConversion(in cty.Type, out cty.Type, unsafe bool) conversion {
|
|||
// Conversion to DynamicPseudoType always just passes through verbatim.
|
||||
return in, nil
|
||||
}
|
||||
if !in.IsKnown() {
|
||||
return cty.UnknownVal(out), nil
|
||||
}
|
||||
if in.IsNull() {
|
||||
// We'll pass through nulls, albeit type converted, and let
|
||||
// the caller deal with whatever handling they want to do in
|
||||
// case null values are considered valid in some applications.
|
||||
return cty.NullVal(out), nil
|
||||
if isKnown, isNull := in.IsKnown(), in.IsNull(); !isKnown || isNull {
|
||||
// Avoid constructing unknown or null values with types which
|
||||
// include optional attributes. Known or non-null object values
|
||||
// will be passed to a conversion function which drops the optional
|
||||
// attributes from the type. Unknown and null pass through values
|
||||
// must do the same to ensure that homogeneous collections have a
|
||||
// single element type.
|
||||
out = out.WithoutOptionalAttributesDeep()
|
||||
|
||||
if !isKnown {
|
||||
return cty.UnknownVal(out), nil
|
||||
}
|
||||
|
||||
if isNull {
|
||||
// We'll pass through nulls, albeit type converted, and let
|
||||
// the caller deal with whatever handling they want to do in
|
||||
// case null values are considered valid in some applications.
|
||||
return cty.NullVal(out), nil
|
||||
}
|
||||
}
|
||||
|
||||
return conv(in, path)
|
||||
|
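A minimal sketch of what the new null handling means for callers (illustrative, not from the commit; it assumes cty.ObjectWithOptionalAttrs and convert.Convert behave as in go-cty v1.10): a null converted to a type with optional attributes comes back typed without the optional-attribute markers.

package main

import (
	"fmt"

	"github.com/zclconf/go-cty/cty"
	"github.com/zclconf/go-cty/cty/convert"
)

func main() {
	// Target type declares "age" as an optional attribute.
	objTy := cty.ObjectWithOptionalAttrs(
		map[string]cty.Type{"name": cty.String, "age": cty.Number},
		[]string{"age"},
	)

	// A null object converted to objTy passes through as a null, but its
	// type should have the optional markers dropped, matching the
	// WithoutOptionalAttributesDeep call in the hunk above.
	in := cty.NullVal(cty.Object(map[string]cty.Type{"name": cty.String}))
	out, err := convert.Convert(in, objTy)
	if err != nil {
		panic(err)
	}
	fmt.Println(out.IsNull())                                             // expected: true
	fmt.Println(out.Type().Equals(objTy.WithoutOptionalAttributesDeep())) // expected: true
}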
|
|
@ -45,12 +45,18 @@ func conversionCollectionToList(ety cty.Type, conv conversion) conversion {
|
|||
}
|
||||
|
||||
if len(elems) == 0 {
|
||||
// Prefer a concrete type over a dynamic type when returning an
|
||||
// empty list
|
||||
if ety == cty.DynamicPseudoType {
|
||||
ety = val.Type().ElementType()
|
||||
return cty.ListValEmpty(val.Type().ElementType()), nil
|
||||
}
|
||||
return cty.ListValEmpty(ety), nil
|
||||
}
|
||||
|
||||
if !cty.CanListVal(elems) {
|
||||
return cty.NilVal, path.NewErrorf("element types must all match for conversion to list")
|
||||
}
|
||||
|
||||
return cty.ListVal(elems), nil
|
||||
}
|
||||
}
|
||||
|
@ -91,11 +97,15 @@ func conversionCollectionToSet(ety cty.Type, conv conversion) conversion {
|
|||
// Prefer a concrete type over a dynamic type when returning an
|
||||
// empty set
|
||||
if ety == cty.DynamicPseudoType {
|
||||
ety = val.Type().ElementType()
|
||||
return cty.SetValEmpty(val.Type().ElementType()), nil
|
||||
}
|
||||
return cty.SetValEmpty(ety), nil
|
||||
}
|
||||
|
||||
if !cty.CanSetVal(elems) {
|
||||
return cty.NilVal, path.NewErrorf("element types must all match for conversion to set")
|
||||
}
|
||||
|
||||
return cty.SetVal(elems), nil
|
||||
}
|
||||
}
|
||||
|
@ -140,7 +150,7 @@ func conversionCollectionToMap(ety cty.Type, conv conversion) conversion {
|
|||
// Prefer a concrete type over a dynamic type when returning an
|
||||
// empty map
|
||||
if ety == cty.DynamicPseudoType {
|
||||
ety = val.Type().ElementType()
|
||||
return cty.MapValEmpty(val.Type().ElementType()), nil
|
||||
}
|
||||
return cty.MapValEmpty(ety), nil
|
||||
}
|
||||
|
@ -152,8 +162,8 @@ func conversionCollectionToMap(ety cty.Type, conv conversion) conversion {
|
|||
}
|
||||
}
|
||||
|
||||
if err := conversionCheckMapElementTypes(elems, path); err != nil {
|
||||
return cty.NilVal, err
|
||||
if !cty.CanMapVal(elems) {
|
||||
return cty.NilVal, path.NewErrorf("element types must all match for conversion to map")
|
||||
}
|
||||
|
||||
return cty.MapVal(elems), nil
|
||||
|
@ -237,6 +247,10 @@ func conversionTupleToSet(tupleType cty.Type, setEty cty.Type, unsafe bool) conv
|
|||
i++
|
||||
}
|
||||
|
||||
if !cty.CanSetVal(elems) {
|
||||
return cty.NilVal, path.NewErrorf("element types must all match for conversion to set")
|
||||
}
|
||||
|
||||
return cty.SetVal(elems), nil
|
||||
}
|
||||
}
|
||||
|
@ -324,6 +338,11 @@ func conversionTupleToList(tupleType cty.Type, listEty cty.Type, unsafe bool) co
|
|||
if err != nil {
|
||||
return cty.NilVal, err
|
||||
}
|
||||
|
||||
if !cty.CanListVal(elems) {
|
||||
return cty.NilVal, path.NewErrorf("element types must all match for conversion to list")
|
||||
}
|
||||
|
||||
return cty.ListVal(elems), nil
|
||||
}
|
||||
}
|
||||
|
@ -402,8 +421,8 @@ func conversionObjectToMap(objectType cty.Type, mapEty cty.Type, unsafe bool) co
|
|||
}
|
||||
}
|
||||
|
||||
if err := conversionCheckMapElementTypes(elems, path); err != nil {
|
||||
return cty.NilVal, err
|
||||
if !cty.CanMapVal(elems) {
|
||||
return cty.NilVal, path.NewErrorf("attribute types must all match for conversion to map")
|
||||
}
|
||||
|
||||
return cty.MapVal(elems), nil
|
||||
|
@ -487,7 +506,7 @@ func conversionUnifyCollectionElements(elems map[string]cty.Value, path cty.Path
|
|||
}
|
||||
unifiedType, _ := unify(elemTypes, unsafe)
|
||||
if unifiedType == cty.NilType {
|
||||
return nil, path.NewErrorf("collection elements cannot be unified")
|
||||
return nil, path.NewErrorf("cannot find a common base type for all elements")
|
||||
}
|
||||
|
||||
unifiedElems := make(map[string]cty.Value)
|
||||
|
@ -514,26 +533,6 @@ func conversionUnifyCollectionElements(elems map[string]cty.Value, path cty.Path
|
|||
return unifiedElems, nil
|
||||
}
|
||||
|
||||
func conversionCheckMapElementTypes(elems map[string]cty.Value, path cty.Path) error {
|
||||
elementType := cty.NilType
|
||||
elemPath := append(path.Copy(), nil)
|
||||
|
||||
for name, elem := range elems {
|
||||
if elementType == cty.NilType {
|
||||
elementType = elem.Type()
|
||||
continue
|
||||
}
|
||||
if !elementType.Equals(elem.Type()) {
|
||||
elemPath[len(elemPath)-1] = cty.IndexStep{
|
||||
Key: cty.StringVal(name),
|
||||
}
|
||||
return elemPath.NewErrorf("%s is required", elementType.FriendlyName())
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func conversionUnifyListElements(elems []cty.Value, path cty.Path, unsafe bool) ([]cty.Value, error) {
|
||||
elemTypes := make([]cty.Type, len(elems))
|
||||
for i, elem := range elems {
|
||||
|
@ -541,7 +540,7 @@ func conversionUnifyListElements(elems []cty.Value, path cty.Path, unsafe bool)
|
|||
}
|
||||
unifiedType, _ := unify(elemTypes, unsafe)
|
||||
if unifiedType == cty.NilType {
|
||||
return nil, path.NewErrorf("collection elements cannot be unified")
|
||||
return nil, path.NewErrorf("cannot find a common base type for all elements")
|
||||
}
|
||||
|
||||
ret := make([]cty.Value, len(elems))
|
||||
|
|
|
@ -29,6 +29,8 @@ func unify(types []cty.Type, unsafe bool) (cty.Type, []Conversion) {
|
|||
// unification purposes.
|
||||
{
|
||||
mapCt := 0
|
||||
listCt := 0
|
||||
setCt := 0
|
||||
objectCt := 0
|
||||
tupleCt := 0
|
||||
dynamicCt := 0
|
||||
|
@ -36,6 +38,10 @@ func unify(types []cty.Type, unsafe bool) (cty.Type, []Conversion) {
|
|||
switch {
|
||||
case ty.IsMapType():
|
||||
mapCt++
|
||||
case ty.IsListType():
|
||||
listCt++
|
||||
case ty.IsSetType():
|
||||
setCt++
|
||||
case ty.IsObjectType():
|
||||
objectCt++
|
||||
case ty.IsTupleType():
|
||||
|
@ -48,7 +54,31 @@ func unify(types []cty.Type, unsafe bool) (cty.Type, []Conversion) {
|
|||
}
|
||||
switch {
|
||||
case mapCt > 0 && (mapCt+dynamicCt) == len(types):
|
||||
return unifyMapTypes(types, unsafe, dynamicCt > 0)
|
||||
return unifyCollectionTypes(cty.Map, types, unsafe, dynamicCt > 0)
|
||||
|
||||
case mapCt > 0 && (mapCt+objectCt+dynamicCt) == len(types):
|
||||
// Objects often contain map data, but are not directly typed as
|
||||
// such due to language constructs or function types. Try to unify
|
||||
// them as maps first before falling back to heterogeneous type
|
||||
// conversion.
|
||||
ty, convs := unifyObjectsAsMaps(types, unsafe)
|
||||
// If we got a map back, we know the unification was successful.
|
||||
if ty.IsMapType() {
|
||||
return ty, convs
|
||||
}
|
||||
case listCt > 0 && (listCt+dynamicCt) == len(types):
|
||||
return unifyCollectionTypes(cty.List, types, unsafe, dynamicCt > 0)
|
||||
case listCt > 0 && (listCt+tupleCt+dynamicCt) == len(types):
|
||||
// Tuples are often lists in disguise, and we may be able to
|
||||
// unify them as such.
|
||||
ty, convs := unifyTuplesAsList(types, unsafe)
|
||||
// if we got a list back, we know the unification was successful.
|
||||
// Otherwise we will fall back to the heterogeneous type codepath.
|
||||
if ty.IsListType() {
|
||||
return ty, convs
|
||||
}
|
||||
case setCt > 0 && (setCt+dynamicCt) == len(types):
|
||||
return unifyCollectionTypes(cty.Set, types, unsafe, dynamicCt > 0)
|
||||
case objectCt > 0 && (objectCt+dynamicCt) == len(types):
|
||||
return unifyObjectTypes(types, unsafe, dynamicCt > 0)
|
||||
case tupleCt > 0 && (tupleCt+dynamicCt) == len(types):
|
||||
|
@ -100,7 +130,121 @@ Preferences:
|
|||
return cty.NilType, nil
|
||||
}
|
||||
|
||||
func unifyMapTypes(types []cty.Type, unsafe bool, hasDynamic bool) (cty.Type, []Conversion) {
|
||||
// unifyTuplesAsList attempts to first see if the tuples unify as lists, then
|
||||
// re-unifies the given types with the list in place of the tuples.
|
||||
func unifyTuplesAsList(types []cty.Type, unsafe bool) (cty.Type, []Conversion) {
|
||||
var tuples []cty.Type
|
||||
var tupleIdxs []int
|
||||
for i, t := range types {
|
||||
if t.IsTupleType() {
|
||||
tuples = append(tuples, t)
|
||||
tupleIdxs = append(tupleIdxs, i)
|
||||
}
|
||||
}
|
||||
|
||||
ty, tupleConvs := unifyTupleTypesToList(tuples, unsafe)
|
||||
if !ty.IsListType() {
|
||||
return cty.NilType, nil
|
||||
}
|
||||
|
||||
// the tuples themselves unified as a list, get the overall
|
||||
// unification with this list type instead of the tuple.
|
||||
// make a copy of the types, so we can fall back to the standard
|
||||
// codepath if something went wrong
|
||||
listed := make([]cty.Type, len(types))
|
||||
copy(listed, types)
|
||||
for _, idx := range tupleIdxs {
|
||||
listed[idx] = ty
|
||||
}
|
||||
|
||||
newTy, convs := unify(listed, unsafe)
|
||||
if !newTy.IsListType() {
|
||||
return cty.NilType, nil
|
||||
}
|
||||
|
||||
// we have a good conversion, wrap the nested tuple conversions.
|
||||
// We know the tuple conversion is not nil, because we went from tuple to
|
||||
// list
|
||||
for i, idx := range tupleIdxs {
|
||||
listConv := convs[idx]
|
||||
tupleConv := tupleConvs[i]
|
||||
|
||||
if listConv == nil {
|
||||
convs[idx] = tupleConv
|
||||
continue
|
||||
}
|
||||
|
||||
convs[idx] = func(in cty.Value) (out cty.Value, err error) {
|
||||
out, err = tupleConv(in)
|
||||
if err != nil {
|
||||
return out, err
|
||||
}
|
||||
|
||||
return listConv(in)
|
||||
}
|
||||
}
|
||||
|
||||
return newTy, convs
|
||||
}
|
||||
|
||||
// unifyObjectsAsMaps attempts to first see if the objects unify as maps, then
|
||||
// re-unifies the given types with the map in place of the objects.
|
||||
func unifyObjectsAsMaps(types []cty.Type, unsafe bool) (cty.Type, []Conversion) {
|
||||
var objs []cty.Type
|
||||
var objIdxs []int
|
||||
for i, t := range types {
|
||||
if t.IsObjectType() {
|
||||
objs = append(objs, t)
|
||||
objIdxs = append(objIdxs, i)
|
||||
}
|
||||
}
|
||||
|
||||
ty, objConvs := unifyObjectTypesToMap(objs, unsafe)
|
||||
if !ty.IsMapType() {
|
||||
return cty.NilType, nil
|
||||
}
|
||||
|
||||
// the objects themselves unified as a map, get the overall
|
||||
// unification with this map type instead of the object.
|
||||
// Make a copy of the types, so we can fall back to the standard codepath if
|
||||
// something went wrong without changing the original types.
|
||||
mapped := make([]cty.Type, len(types))
|
||||
copy(mapped, types)
|
||||
for _, idx := range objIdxs {
|
||||
mapped[idx] = ty
|
||||
}
|
||||
|
||||
newTy, convs := unify(mapped, unsafe)
|
||||
if !newTy.IsMapType() {
|
||||
return cty.NilType, nil
|
||||
}
|
||||
|
||||
// we have a good conversion, so wrap the nested object conversions.
|
||||
// We know the object conversion is not nil, because we went from object to
|
||||
// map.
|
||||
for i, idx := range objIdxs {
|
||||
mapConv := convs[idx]
|
||||
objConv := objConvs[i]
|
||||
|
||||
if mapConv == nil {
|
||||
convs[idx] = objConv
|
||||
continue
|
||||
}
|
||||
|
||||
convs[idx] = func(in cty.Value) (out cty.Value, err error) {
|
||||
out, err = objConv(in)
|
||||
if err != nil {
|
||||
return out, err
|
||||
}
|
||||
|
||||
return mapConv(in)
|
||||
}
|
||||
}
|
||||
|
||||
return newTy, convs
|
||||
}
|
||||
|
||||
func unifyCollectionTypes(collectionType func(cty.Type) cty.Type, types []cty.Type, unsafe bool, hasDynamic bool) (cty.Type, []Conversion) {
|
||||
// If we had any dynamic types in the input here then we can't predict
|
||||
// what path we'll take through here once these become known types, so
|
||||
// we'll conservatively produce DynamicVal for these.
|
||||
|
@ -117,7 +261,7 @@ func unifyMapTypes(types []cty.Type, unsafe bool, hasDynamic bool) (cty.Type, []
|
|||
return cty.NilType, nil
|
||||
}
|
||||
|
||||
retTy := cty.Map(retElemType)
|
||||
retTy := collectionType(retElemType)
|
||||
|
||||
conversions := make([]Conversion, len(types))
|
||||
for i, ty := range types {
|
||||
|
|
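As a rough illustration of the new unification cases (a sketch, assuming the package's exported UnifyUnsafe entry point): a list type and a compatible tuple type should now unify to a single list type via unifyTuplesAsList.

package main

import (
	"fmt"

	"github.com/zclconf/go-cty/cty"
	"github.com/zclconf/go-cty/cty/convert"
)

func main() {
	types := []cty.Type{
		cty.List(cty.String),
		cty.Tuple([]cty.Type{cty.String, cty.String}),
	}

	// With the changes above, the tuple is treated as a list in disguise.
	ty, convs := convert.UnifyUnsafe(types)
	fmt.Println(ty.GoString())   // expected: cty.List(cty.String)
	fmt.Println(len(convs) == 2) // one conversion slot per input type
}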
|
@ -142,7 +142,7 @@ func (f Function) ReturnTypeForValues(args []cty.Value) (ty cty.Type, err error)
|
|||
for i, spec := range f.spec.Params {
|
||||
val := posArgs[i]
|
||||
|
||||
if val.IsMarked() && !spec.AllowMarked {
|
||||
if val.ContainsMarked() && !spec.AllowMarked {
|
||||
// During type checking we just unmark values and discard their
|
||||
// marks, under the assumption that during actual execution of
|
||||
// the function we'll do similarly and then re-apply the marks
|
||||
|
@ -150,7 +150,7 @@ func (f Function) ReturnTypeForValues(args []cty.Value) (ty cty.Type, err error)
|
|||
// inspects values (rather than just types) in its Type
|
||||
// implementation can potentially fail to take into account marks,
|
||||
// unless it specifically opts in to seeing them.
|
||||
unmarked, _ := val.Unmark()
|
||||
unmarked, _ := val.UnmarkDeep()
|
||||
newArgs := make([]cty.Value, len(args))
|
||||
copy(newArgs, args)
|
||||
newArgs[i] = unmarked
|
||||
|
@ -183,9 +183,9 @@ func (f Function) ReturnTypeForValues(args []cty.Value) (ty cty.Type, err error)
|
|||
for i, val := range varArgs {
|
||||
realI := i + len(posArgs)
|
||||
|
||||
if val.IsMarked() && !spec.AllowMarked {
|
||||
if val.ContainsMarked() && !spec.AllowMarked {
|
||||
// See the similar block in the loop above for what's going on here.
|
||||
unmarked, _ := val.Unmark()
|
||||
unmarked, _ := val.UnmarkDeep()
|
||||
newArgs := make([]cty.Value, len(args))
|
||||
copy(newArgs, args)
|
||||
newArgs[realI] = unmarked
|
||||
|
|
|
@ -111,6 +111,7 @@ var LengthFunc = function.New(&function.Spec{
|
|||
Name: "collection",
|
||||
Type: cty.DynamicPseudoType,
|
||||
AllowDynamicType: true,
|
||||
AllowMarked: true,
|
||||
},
|
||||
},
|
||||
Type: func(args []cty.Value) (ret cty.Type, err error) {
|
||||
|
@ -128,8 +129,9 @@ var LengthFunc = function.New(&function.Spec{
|
|||
var ElementFunc = function.New(&function.Spec{
|
||||
Params: []function.Parameter{
|
||||
{
|
||||
Name: "list",
|
||||
Type: cty.DynamicPseudoType,
|
||||
Name: "list",
|
||||
Type: cty.DynamicPseudoType,
|
||||
AllowMarked: true,
|
||||
},
|
||||
{
|
||||
Name: "index",
|
||||
|
@ -184,11 +186,12 @@ var ElementFunc = function.New(&function.Spec{
|
|||
return cty.DynamicVal, fmt.Errorf("cannot use element function with a negative index")
|
||||
}
|
||||
|
||||
if !args[0].IsKnown() {
|
||||
input, marks := args[0].Unmark()
|
||||
if !input.IsKnown() {
|
||||
return cty.UnknownVal(retType), nil
|
||||
}
|
||||
|
||||
l := args[0].LengthInt()
|
||||
l := input.LengthInt()
|
||||
if l == 0 {
|
||||
return cty.DynamicVal, errors.New("cannot use element function with an empty list")
|
||||
}
|
||||
|
@ -196,7 +199,7 @@ var ElementFunc = function.New(&function.Spec{
|
|||
|
||||
// We did all the necessary type checks in the type function above,
|
||||
// so this is guaranteed not to fail.
|
||||
return args[0].Index(cty.NumberIntVal(int64(index))), nil
|
||||
return input.Index(cty.NumberIntVal(int64(index))).WithMarks(marks), nil
|
||||
},
|
||||
})
|
||||
|
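A hedged usage sketch for the element() change above (values are illustrative): the input list is unmarked before indexing and the marks are re-applied, so an element of a marked list comes back marked.

package main

import (
	"fmt"

	"github.com/zclconf/go-cty/cty"
	"github.com/zclconf/go-cty/cty/function/stdlib"
)

func main() {
	list := cty.ListVal([]cty.Value{
		cty.StringVal("a"), cty.StringVal("b"), cty.StringVal("c"),
	}).Mark("sensitive")

	got, err := stdlib.ElementFunc.Call([]cty.Value{list, cty.NumberIntVal(1)})
	if err != nil {
		panic(err)
	}
	// The list's mark is carried onto the returned element.
	fmt.Println(got.HasMark("sensitive")) // expected: true
}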
||||
|
@ -398,12 +401,14 @@ var DistinctFunc = function.New(&function.Spec{
|
|||
var ChunklistFunc = function.New(&function.Spec{
|
||||
Params: []function.Parameter{
|
||||
{
|
||||
Name: "list",
|
||||
Type: cty.List(cty.DynamicPseudoType),
|
||||
Name: "list",
|
||||
Type: cty.List(cty.DynamicPseudoType),
|
||||
AllowMarked: true,
|
||||
},
|
||||
{
|
||||
Name: "size",
|
||||
Type: cty.Number,
|
||||
Name: "size",
|
||||
Type: cty.Number,
|
||||
AllowMarked: true,
|
||||
},
|
||||
},
|
||||
Type: func(args []cty.Value) (cty.Type, error) {
|
||||
|
@ -411,35 +416,40 @@ var ChunklistFunc = function.New(&function.Spec{
|
|||
},
|
||||
Impl: func(args []cty.Value, retType cty.Type) (ret cty.Value, err error) {
|
||||
listVal := args[0]
|
||||
if !listVal.IsKnown() {
|
||||
return cty.UnknownVal(retType), nil
|
||||
}
|
||||
|
||||
if listVal.LengthInt() == 0 {
|
||||
return cty.ListValEmpty(listVal.Type()), nil
|
||||
}
|
||||
sizeVal := args[1]
|
||||
listVal, listMarks := listVal.Unmark()
|
||||
sizeVal, sizeMarks := sizeVal.Unmark()
|
||||
// All return paths below must include .WithMarks(retMarks) to propagate
|
||||
// the top-level marks into the return value. Deep marks inside the
|
||||
// list will just propagate naturally because we treat those values
|
||||
// as opaque here.
|
||||
retMarks := cty.NewValueMarks(listMarks, sizeMarks)
|
||||
|
||||
var size int
|
||||
err = gocty.FromCtyValue(args[1], &size)
|
||||
err = gocty.FromCtyValue(sizeVal, &size)
|
||||
if err != nil {
|
||||
return cty.NilVal, fmt.Errorf("invalid index: %s", err)
|
||||
return cty.NilVal, fmt.Errorf("invalid size: %s", err)
|
||||
}
|
||||
|
||||
if size < 0 {
|
||||
return cty.NilVal, errors.New("the size argument must be positive")
|
||||
}
|
||||
|
||||
if listVal.LengthInt() == 0 {
|
||||
return cty.ListValEmpty(listVal.Type()).WithMarks(retMarks), nil
|
||||
}
|
||||
|
||||
output := make([]cty.Value, 0)
|
||||
|
||||
// if size is 0, return a single-element list containing the whole input list
|
||||
if size == 0 {
|
||||
output = append(output, listVal)
|
||||
return cty.ListVal(output), nil
|
||||
return cty.ListVal(output).WithMarks(retMarks), nil
|
||||
}
|
||||
|
||||
chunk := make([]cty.Value, 0)
|
||||
|
||||
l := args[0].LengthInt()
|
||||
l := listVal.LengthInt()
|
||||
i := 0
|
||||
|
||||
for it := listVal.ElementIterator(); it.Next(); {
|
||||
|
@ -454,7 +464,7 @@ var ChunklistFunc = function.New(&function.Spec{
|
|||
i++
|
||||
}
|
||||
|
||||
return cty.ListVal(output), nil
|
||||
return cty.ListVal(output).WithMarks(retMarks), nil
|
||||
},
|
||||
})
|
||||
|
||||
|
@ -463,8 +473,9 @@ var ChunklistFunc = function.New(&function.Spec{
|
|||
var FlattenFunc = function.New(&function.Spec{
|
||||
Params: []function.Parameter{
|
||||
{
|
||||
Name: "list",
|
||||
Type: cty.DynamicPseudoType,
|
||||
Name: "list",
|
||||
Type: cty.DynamicPseudoType,
|
||||
AllowMarked: true,
|
||||
},
|
||||
},
|
||||
Type: func(args []cty.Value) (cty.Type, error) {
|
||||
|
@ -477,7 +488,8 @@ var FlattenFunc = function.New(&function.Spec{
|
|||
return cty.NilType, errors.New("can only flatten lists, sets and tuples")
|
||||
}
|
||||
|
||||
retVal, known := flattener(args[0])
|
||||
// marks are attached to values, so ignore while determining type
|
||||
retVal, _, known := flattener(args[0])
|
||||
if !known {
|
||||
return cty.DynamicPseudoType, nil
|
||||
}
|
||||
|
@ -490,46 +502,66 @@ var FlattenFunc = function.New(&function.Spec{
|
|||
},
|
||||
Impl: func(args []cty.Value, retType cty.Type) (ret cty.Value, err error) {
|
||||
inputList := args[0]
|
||||
if inputList.LengthInt() == 0 {
|
||||
return cty.EmptyTupleVal, nil
|
||||
|
||||
if unmarked, marks := inputList.Unmark(); unmarked.LengthInt() == 0 {
|
||||
return cty.EmptyTupleVal.WithMarks(marks), nil
|
||||
}
|
||||
|
||||
out, known := flattener(inputList)
|
||||
out, markses, known := flattener(inputList)
|
||||
if !known {
|
||||
return cty.UnknownVal(retType), nil
|
||||
return cty.UnknownVal(retType).WithMarks(markses...), nil
|
||||
}
|
||||
|
||||
return cty.TupleVal(out), nil
|
||||
return cty.TupleVal(out).WithMarks(markses...), nil
|
||||
},
|
||||
})
|
||||
|
||||
// Flatten until it's not a cty.List, and return whether the value is known.
|
||||
// We can flatten lists with unknown values, as long as they are not
|
||||
// lists themselves.
|
||||
func flattener(flattenList cty.Value) ([]cty.Value, bool) {
|
||||
func flattener(flattenList cty.Value) ([]cty.Value, []cty.ValueMarks, bool) {
|
||||
var markses []cty.ValueMarks
|
||||
flattenList, flattenListMarks := flattenList.Unmark()
|
||||
if len(flattenListMarks) > 0 {
|
||||
markses = append(markses, flattenListMarks)
|
||||
}
|
||||
if !flattenList.Length().IsKnown() {
|
||||
// If we don't know the length of what we're flattening then we can't
|
||||
// predict the length of our result yet either.
|
||||
return nil, false
|
||||
return nil, markses, false
|
||||
}
|
||||
|
||||
out := make([]cty.Value, 0)
|
||||
isKnown := true
|
||||
for it := flattenList.ElementIterator(); it.Next(); {
|
||||
_, val := it.Element()
|
||||
|
||||
// Any dynamic types could result in more collections that need to be
|
||||
// flattened, so the type cannot be known.
|
||||
if val == cty.DynamicVal {
|
||||
isKnown = false
|
||||
}
|
||||
|
||||
if val.Type().IsListType() || val.Type().IsSetType() || val.Type().IsTupleType() {
|
||||
if !val.IsKnown() {
|
||||
return out, false
|
||||
isKnown = false
|
||||
_, unknownMarks := val.Unmark()
|
||||
markses = append(markses, unknownMarks)
|
||||
continue
|
||||
}
|
||||
|
||||
res, known := flattener(val)
|
||||
if !known {
|
||||
return res, known
|
||||
res, resMarks, known := flattener(val)
|
||||
markses = append(markses, resMarks...)
|
||||
if known {
|
||||
out = append(out, res...)
|
||||
} else {
|
||||
isKnown = false
|
||||
}
|
||||
out = append(out, res...)
|
||||
} else {
|
||||
out = append(out, val)
|
||||
}
|
||||
}
|
||||
return out, true
|
||||
return out, markses, isKnown
|
||||
}
|
||||
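A sketch of the observable effect of the flattener changes (illustrative only): marks found on nested collections are now collected and re-applied to the flattened result rather than being lost.

package main

import (
	"fmt"

	"github.com/zclconf/go-cty/cty"
	"github.com/zclconf/go-cty/cty/function/stdlib"
)

func main() {
	inner := cty.ListVal([]cty.Value{cty.StringVal("a"), cty.StringVal("b")}).Mark("sensitive")
	outer := cty.TupleVal([]cty.Value{
		inner,
		cty.ListVal([]cty.Value{cty.StringVal("c")}),
	})

	got, err := stdlib.FlattenFunc.Call([]cty.Value{outer})
	if err != nil {
		panic(err)
	}
	// The inner list's mark ends up on the flattened tuple.
	fmt.Println(got.HasMark("sensitive")) // expected: true
}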
|
||||
// KeysFunc is a function that takes a map and returns a sorted list of the map keys.
|
||||
|
@ -539,6 +571,7 @@ var KeysFunc = function.New(&function.Spec{
|
|||
Name: "inputMap",
|
||||
Type: cty.DynamicPseudoType,
|
||||
AllowUnknown: true,
|
||||
AllowMarked: true,
|
||||
},
|
||||
},
|
||||
Type: func(args []cty.Value) (cty.Type, error) {
|
||||
|
@ -563,7 +596,11 @@ var KeysFunc = function.New(&function.Spec{
|
|||
}
|
||||
},
|
||||
Impl: func(args []cty.Value, retType cty.Type) (cty.Value, error) {
|
||||
m := args[0]
|
||||
// We must unmark the value before we can use ElementIterator on it, and
|
||||
// then re-apply the same marks (possibly none) when we return. Since we
|
||||
// don't mark map keys, we can throw away any nested marks, which would
|
||||
// only apply to values.
|
||||
m, marks := args[0].Unmark()
|
||||
var keys []cty.Value
|
||||
|
||||
switch {
|
||||
|
@ -576,28 +613,28 @@ var KeysFunc = function.New(&function.Spec{
|
|||
}
|
||||
sort.Strings(names) // same ordering guaranteed by cty's ElementIterator
|
||||
if len(names) == 0 {
|
||||
return cty.EmptyTupleVal, nil
|
||||
return cty.EmptyTupleVal.WithMarks(marks), nil
|
||||
}
|
||||
keys = make([]cty.Value, len(names))
|
||||
for i, name := range names {
|
||||
keys[i] = cty.StringVal(name)
|
||||
}
|
||||
return cty.TupleVal(keys), nil
|
||||
return cty.TupleVal(keys).WithMarks(marks), nil
|
||||
default:
|
||||
if !m.IsKnown() {
|
||||
return cty.UnknownVal(retType), nil
|
||||
return cty.UnknownVal(retType).WithMarks(marks), nil
|
||||
}
|
||||
|
||||
// cty guarantees that ElementIterator will iterate in lexicographical
|
||||
// order by key.
|
||||
for it := args[0].ElementIterator(); it.Next(); {
|
||||
for it := m.ElementIterator(); it.Next(); {
|
||||
k, _ := it.Element()
|
||||
keys = append(keys, k)
|
||||
}
|
||||
if len(keys) == 0 {
|
||||
return cty.ListValEmpty(cty.String), nil
|
||||
return cty.ListValEmpty(cty.String).WithMarks(marks), nil
|
||||
}
|
||||
return cty.ListVal(keys), nil
|
||||
return cty.ListVal(keys).WithMarks(marks), nil
|
||||
}
|
||||
},
|
||||
})
|
||||
|
@ -606,16 +643,19 @@ var KeysFunc = function.New(&function.Spec{
|
|||
var LookupFunc = function.New(&function.Spec{
|
||||
Params: []function.Parameter{
|
||||
{
|
||||
Name: "inputMap",
|
||||
Type: cty.DynamicPseudoType,
|
||||
Name: "inputMap",
|
||||
Type: cty.DynamicPseudoType,
|
||||
AllowMarked: true,
|
||||
},
|
||||
{
|
||||
Name: "key",
|
||||
Type: cty.String,
|
||||
Name: "key",
|
||||
Type: cty.String,
|
||||
AllowMarked: true,
|
||||
},
|
||||
{
|
||||
Name: "default",
|
||||
Type: cty.DynamicPseudoType,
|
||||
Name: "default",
|
||||
Type: cty.DynamicPseudoType,
|
||||
AllowMarked: true,
|
||||
},
|
||||
},
|
||||
Type: func(args []cty.Value) (ret cty.Type, err error) {
|
||||
|
@ -627,7 +667,8 @@ var LookupFunc = function.New(&function.Spec{
|
|||
return cty.DynamicPseudoType, nil
|
||||
}
|
||||
|
||||
key := args[1].AsString()
|
||||
keyVal, _ := args[1].Unmark()
|
||||
key := keyVal.AsString()
|
||||
if ty.HasAttribute(key) {
|
||||
return args[0].GetAttr(key).Type(), nil
|
||||
} else if len(args) == 3 {
|
||||
|
@ -649,28 +690,39 @@ var LookupFunc = function.New(&function.Spec{
|
|||
}
|
||||
},
|
||||
Impl: func(args []cty.Value, retType cty.Type) (ret cty.Value, err error) {
|
||||
// leave default value marked
|
||||
defaultVal := args[2]
|
||||
|
||||
mapVar := args[0]
|
||||
lookupKey := args[1].AsString()
|
||||
var markses []cty.ValueMarks
|
||||
|
||||
// unmark collection, retain marks to reapply later
|
||||
mapVar, mapMarks := args[0].Unmark()
|
||||
markses = append(markses, mapMarks)
|
||||
|
||||
// include marks on the key in the result
|
||||
keyVal, keyMarks := args[1].Unmark()
|
||||
if len(keyMarks) > 0 {
|
||||
markses = append(markses, keyMarks)
|
||||
}
|
||||
lookupKey := keyVal.AsString()
|
||||
|
||||
if !mapVar.IsWhollyKnown() {
|
||||
return cty.UnknownVal(retType), nil
|
||||
return cty.UnknownVal(retType).WithMarks(markses...), nil
|
||||
}
|
||||
|
||||
if mapVar.Type().IsObjectType() {
|
||||
if mapVar.Type().HasAttribute(lookupKey) {
|
||||
return mapVar.GetAttr(lookupKey), nil
|
||||
return mapVar.GetAttr(lookupKey).WithMarks(markses...), nil
|
||||
}
|
||||
} else if mapVar.HasIndex(cty.StringVal(lookupKey)) == cty.True {
|
||||
return mapVar.Index(cty.StringVal(lookupKey)), nil
|
||||
return mapVar.Index(cty.StringVal(lookupKey)).WithMarks(markses...), nil
|
||||
}
|
||||
|
||||
defaultVal, err = convert.Convert(defaultVal, retType)
|
||||
if err != nil {
|
||||
return cty.NilVal, err
|
||||
}
|
||||
return defaultVal, nil
|
||||
return defaultVal.WithMarks(markses...), nil
|
||||
},
|
||||
})
|
||||
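A short sketch for the lookup() change (illustrative): marks from both the collection and the key are merged onto whatever value is returned, including the default.

package main

import (
	"fmt"

	"github.com/zclconf/go-cty/cty"
	"github.com/zclconf/go-cty/cty/function/stdlib"
)

func main() {
	m := cty.MapVal(map[string]cty.Value{"a": cty.NumberIntVal(1)})
	key := cty.StringVal("a").Mark("sensitive")

	got, err := stdlib.LookupFunc.Call([]cty.Value{m, key, cty.NumberIntVal(0)})
	if err != nil {
		panic(err)
	}
	// The key's mark is merged into the result per the markses handling above.
	fmt.Println(got.HasMark("sensitive")) // expected: true
}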
|
||||
|
@ -687,6 +739,7 @@ var MergeFunc = function.New(&function.Spec{
|
|||
Type: cty.DynamicPseudoType,
|
||||
AllowDynamicType: true,
|
||||
AllowNull: true,
|
||||
AllowMarked: true,
|
||||
},
|
||||
Type: func(args []cty.Value) (cty.Type, error) {
|
||||
// empty args is accepted, so assume an empty object since we have no
|
||||
|
@ -712,6 +765,8 @@ var MergeFunc = function.New(&function.Spec{
|
|||
if !ty.IsMapType() && !ty.IsObjectType() {
|
||||
return cty.NilType, fmt.Errorf("arguments must be maps or objects, got %#v", ty.FriendlyName())
|
||||
}
|
||||
// marks are attached to values, so ignore while determining type
|
||||
arg, _ = arg.Unmark()
|
||||
|
||||
switch {
|
||||
case ty.IsObjectType() && !arg.IsNull():
|
||||
|
@ -761,11 +816,16 @@ var MergeFunc = function.New(&function.Spec{
|
|||
},
|
||||
Impl: func(args []cty.Value, retType cty.Type) (ret cty.Value, err error) {
|
||||
outputMap := make(map[string]cty.Value)
|
||||
var markses []cty.ValueMarks // remember any marked maps/objects we find
|
||||
|
||||
for _, arg := range args {
|
||||
if arg.IsNull() {
|
||||
continue
|
||||
}
|
||||
arg, argMarks := arg.Unmark()
|
||||
if len(argMarks) > 0 {
|
||||
markses = append(markses, argMarks)
|
||||
}
|
||||
for it := arg.ElementIterator(); it.Next(); {
|
||||
k, v := it.Element()
|
||||
outputMap[k.AsString()] = v
|
||||
|
@ -775,11 +835,11 @@ var MergeFunc = function.New(&function.Spec{
|
|||
switch {
|
||||
case retType.IsMapType():
|
||||
if len(outputMap) == 0 {
|
||||
return cty.MapValEmpty(retType.ElementType()), nil
|
||||
return cty.MapValEmpty(retType.ElementType()).WithMarks(markses...), nil
|
||||
}
|
||||
return cty.MapVal(outputMap), nil
|
||||
return cty.MapVal(outputMap).WithMarks(markses...), nil
|
||||
case retType.IsObjectType(), retType.Equals(cty.DynamicPseudoType):
|
||||
return cty.ObjectVal(outputMap), nil
|
||||
return cty.ObjectVal(outputMap).WithMarks(markses...), nil
|
||||
default:
|
||||
panic(fmt.Sprintf("unexpected return type: %#v", retType))
|
||||
}
|
||||
|
@ -791,8 +851,9 @@ var MergeFunc = function.New(&function.Spec{
|
|||
var ReverseListFunc = function.New(&function.Spec{
|
||||
Params: []function.Parameter{
|
||||
{
|
||||
Name: "list",
|
||||
Type: cty.DynamicPseudoType,
|
||||
Name: "list",
|
||||
Type: cty.DynamicPseudoType,
|
||||
AllowMarked: true,
|
||||
},
|
||||
},
|
||||
Type: func(args []cty.Value) (cty.Type, error) {
|
||||
|
@ -812,19 +873,21 @@ var ReverseListFunc = function.New(&function.Spec{
|
|||
}
|
||||
},
|
||||
Impl: func(args []cty.Value, retType cty.Type) (ret cty.Value, err error) {
|
||||
in := args[0].AsValueSlice()
|
||||
outVals := make([]cty.Value, len(in))
|
||||
for i, v := range in {
|
||||
in, marks := args[0].Unmark()
|
||||
inVals := in.AsValueSlice()
|
||||
outVals := make([]cty.Value, len(inVals))
|
||||
|
||||
for i, v := range inVals {
|
||||
outVals[len(outVals)-i-1] = v
|
||||
}
|
||||
switch {
|
||||
case retType.IsTupleType():
|
||||
return cty.TupleVal(outVals), nil
|
||||
return cty.TupleVal(outVals).WithMarks(marks), nil
|
||||
default:
|
||||
if len(outVals) == 0 {
|
||||
return cty.ListValEmpty(retType.ElementType()), nil
|
||||
return cty.ListValEmpty(retType.ElementType()).WithMarks(marks), nil
|
||||
}
|
||||
return cty.ListVal(outVals), nil
|
||||
return cty.ListVal(outVals).WithMarks(marks), nil
|
||||
}
|
||||
},
|
||||
})
|
||||
|
@ -836,8 +899,9 @@ var ReverseListFunc = function.New(&function.Spec{
|
|||
var SetProductFunc = function.New(&function.Spec{
|
||||
Params: []function.Parameter{},
|
||||
VarParam: &function.Parameter{
|
||||
Name: "sets",
|
||||
Type: cty.DynamicPseudoType,
|
||||
Name: "sets",
|
||||
Type: cty.DynamicPseudoType,
|
||||
AllowMarked: true,
|
||||
},
|
||||
Type: func(args []cty.Value) (retType cty.Type, err error) {
|
||||
if len(args) < 2 {
|
||||
|
@ -881,11 +945,19 @@ var SetProductFunc = function.New(&function.Spec{
|
|||
},
|
||||
Impl: func(args []cty.Value, retType cty.Type) (ret cty.Value, err error) {
|
||||
ety := retType.ElementType()
|
||||
var retMarks cty.ValueMarks
|
||||
|
||||
total := 1
|
||||
var hasUnknownLength bool
|
||||
for _, arg := range args {
|
||||
arg, marks := arg.Unmark()
|
||||
retMarks = cty.NewValueMarks(retMarks, marks)
|
||||
|
||||
// Continue processing after we find an argument with unknown
|
||||
// length to ensure that we cover all the marks
|
||||
if !arg.Length().IsKnown() {
|
||||
return cty.UnknownVal(retType), nil
|
||||
hasUnknownLength = true
|
||||
continue
|
||||
}
|
||||
|
||||
// Because of our type checking function, we are guaranteed that
|
||||
|
@ -894,13 +966,17 @@ var SetProductFunc = function.New(&function.Spec{
|
|||
total *= arg.LengthInt()
|
||||
}
|
||||
|
||||
if hasUnknownLength {
|
||||
return cty.UnknownVal(retType).WithMarks(retMarks), nil
|
||||
}
|
||||
|
||||
if total == 0 {
|
||||
// If any of the arguments was an empty collection then our result
|
||||
// is also an empty collection, which we'll short-circuit here.
|
||||
if retType.IsListType() {
|
||||
return cty.ListValEmpty(ety), nil
|
||||
return cty.ListValEmpty(ety).WithMarks(retMarks), nil
|
||||
}
|
||||
return cty.SetValEmpty(ety), nil
|
||||
return cty.SetValEmpty(ety).WithMarks(retMarks), nil
|
||||
}
|
||||
|
||||
subEtys := ety.TupleElementTypes()
|
||||
|
@ -911,6 +987,8 @@ var SetProductFunc = function.New(&function.Spec{
|
|||
s := 0
|
||||
argVals := make([][]cty.Value, len(args))
|
||||
for i, arg := range args {
|
||||
// We've already stored the marks in retMarks
|
||||
arg, _ := arg.Unmark()
|
||||
argVals[i] = arg.AsValueSlice()
|
||||
}
|
||||
|
||||
|
@ -950,9 +1028,9 @@ var SetProductFunc = function.New(&function.Spec{
|
|||
}
|
||||
|
||||
if retType.IsListType() {
|
||||
return cty.ListVal(productVals), nil
|
||||
return cty.ListVal(productVals).WithMarks(retMarks), nil
|
||||
}
|
||||
return cty.SetVal(productVals), nil
|
||||
return cty.SetVal(productVals).WithMarks(retMarks), nil
|
||||
},
|
||||
})
|
||||
|
||||
|
@ -961,8 +1039,9 @@ var SetProductFunc = function.New(&function.Spec{
|
|||
var SliceFunc = function.New(&function.Spec{
|
||||
Params: []function.Parameter{
|
||||
{
|
||||
Name: "list",
|
||||
Type: cty.DynamicPseudoType,
|
||||
Name: "list",
|
||||
Type: cty.DynamicPseudoType,
|
||||
AllowMarked: true,
|
||||
},
|
||||
{
|
||||
Name: "start_index",
|
||||
|
@ -1001,10 +1080,10 @@ var SliceFunc = function.New(&function.Spec{
|
|||
return cty.Tuple(argTy.TupleElementTypes()[startIndex:endIndex]), nil
|
||||
},
|
||||
Impl: func(args []cty.Value, retType cty.Type) (ret cty.Value, err error) {
|
||||
inputList := args[0]
|
||||
inputList, marks := args[0].Unmark()
|
||||
|
||||
if retType == cty.DynamicPseudoType {
|
||||
return cty.DynamicVal, nil
|
||||
return cty.DynamicVal.WithMarks(marks), nil
|
||||
}
|
||||
|
||||
// we ignore idxsKnown return value here because the indices are always
|
||||
|
@ -1016,18 +1095,18 @@ var SliceFunc = function.New(&function.Spec{
|
|||
|
||||
if endIndex-startIndex == 0 {
|
||||
if retType.IsTupleType() {
|
||||
return cty.EmptyTupleVal, nil
|
||||
return cty.EmptyTupleVal.WithMarks(marks), nil
|
||||
}
|
||||
return cty.ListValEmpty(retType.ElementType()), nil
|
||||
return cty.ListValEmpty(retType.ElementType()).WithMarks(marks), nil
|
||||
}
|
||||
|
||||
outputList := inputList.AsValueSlice()[startIndex:endIndex]
|
||||
|
||||
if retType.IsTupleType() {
|
||||
return cty.TupleVal(outputList), nil
|
||||
return cty.TupleVal(outputList).WithMarks(marks), nil
|
||||
}
|
||||
|
||||
return cty.ListVal(outputList), nil
|
||||
return cty.ListVal(outputList).WithMarks(marks), nil
|
||||
},
|
||||
})
|
||||
|
||||
|
@ -1035,9 +1114,12 @@ func sliceIndexes(args []cty.Value) (int, int, bool, error) {
|
|||
var startIndex, endIndex, length int
|
||||
var startKnown, endKnown, lengthKnown bool
|
||||
|
||||
// remove marks from args[0]
|
||||
list, _ := args[0].Unmark()
|
||||
|
||||
// If it's a tuple then we always know the length by the type, but collections might be unknown or have unknown length
|
||||
if args[0].Type().IsTupleType() || args[0].Length().IsKnown() {
|
||||
length = args[0].LengthInt()
|
||||
if list.Type().IsTupleType() || list.Length().IsKnown() {
|
||||
length = list.LengthInt()
|
||||
lengthKnown = true
|
||||
}
|
||||
|
||||
|
@ -1078,8 +1160,9 @@ func sliceIndexes(args []cty.Value) (int, int, bool, error) {
|
|||
var ValuesFunc = function.New(&function.Spec{
|
||||
Params: []function.Parameter{
|
||||
{
|
||||
Name: "values",
|
||||
Type: cty.DynamicPseudoType,
|
||||
Name: "values",
|
||||
Type: cty.DynamicPseudoType,
|
||||
AllowMarked: true,
|
||||
},
|
||||
},
|
||||
Type: func(args []cty.Value) (ret cty.Type, err error) {
|
||||
|
@ -1112,6 +1195,13 @@ var ValuesFunc = function.New(&function.Spec{
|
|||
Impl: func(args []cty.Value, retType cty.Type) (ret cty.Value, err error) {
|
||||
mapVar := args[0]
|
||||
|
||||
// We must unmark the value before we can use ElementIterator on it,
|
||||
// and then re-apply the same marks (possibly none) when we return.
|
||||
// (We leave the inner values just as they are, because we won't be
|
||||
// doing anything with them aside from copying them verbatim into the
|
||||
// result, marks and all.)
|
||||
mapVar, marks := mapVar.Unmark()
|
||||
|
||||
// We can just iterate the map/object value here because cty guarantees
|
||||
// that these types always iterate in key lexicographical order.
|
||||
var values []cty.Value
|
||||
|
@ -1120,13 +1210,15 @@ var ValuesFunc = function.New(&function.Spec{
|
|||
values = append(values, val)
|
||||
}
|
||||
|
||||
// All of the return paths must include .WithMarks(marks) so that we
|
||||
// will preserve the markings of the overall map/object we were given.
|
||||
if retType.IsTupleType() {
|
||||
return cty.TupleVal(values), nil
|
||||
return cty.TupleVal(values).WithMarks(marks), nil
|
||||
}
|
||||
if len(values) == 0 {
|
||||
return cty.ListValEmpty(retType.ElementType()), nil
|
||||
return cty.ListValEmpty(retType.ElementType()).WithMarks(marks), nil
|
||||
}
|
||||
return cty.ListVal(values), nil
|
||||
return cty.ListVal(values).WithMarks(marks), nil
|
||||
},
|
||||
})
|
||||
|
||||
|
@ -1135,12 +1227,14 @@ var ValuesFunc = function.New(&function.Spec{
|
|||
var ZipmapFunc = function.New(&function.Spec{
|
||||
Params: []function.Parameter{
|
||||
{
|
||||
Name: "keys",
|
||||
Type: cty.List(cty.String),
|
||||
Name: "keys",
|
||||
Type: cty.List(cty.String),
|
||||
AllowMarked: true,
|
||||
},
|
||||
{
|
||||
Name: "values",
|
||||
Type: cty.DynamicPseudoType,
|
||||
Name: "values",
|
||||
Type: cty.DynamicPseudoType,
|
||||
AllowMarked: true,
|
||||
},
|
||||
},
|
||||
Type: func(args []cty.Value) (ret cty.Type, err error) {
|
||||
|
@ -1158,6 +1252,13 @@ var ZipmapFunc = function.New(&function.Spec{
|
|||
return cty.DynamicPseudoType, nil
|
||||
}
|
||||
|
||||
// NOTE: Marking of the keys list can't be represented in the
|
||||
// result type, so the tuple type here will disclose the keys.
|
||||
// This is unfortunate but is a common compromise with dynamic
|
||||
// return types; the result from Impl will still reflect the marks
|
||||
// from the keys list, so a mark-using caller should look out for
|
||||
// that if it's important for their use-case.
|
||||
keys, _ := keys.Unmark()
|
||||
keysRaw := keys.AsValueSlice()
|
||||
valueTypesRaw := valuesTy.TupleElementTypes()
|
||||
if len(keysRaw) != len(valueTypesRaw) {
|
||||
|
@ -1165,6 +1266,7 @@ var ZipmapFunc = function.New(&function.Spec{
|
|||
}
|
||||
atys := make(map[string]cty.Type, len(valueTypesRaw))
|
||||
for i, keyVal := range keysRaw {
|
||||
keyVal, _ = keyVal.Unmark()
|
||||
if keyVal.IsNull() {
|
||||
return cty.NilType, fmt.Errorf("keys list has null value at index %d", i)
|
||||
}
|
||||
|
@ -1180,11 +1282,17 @@ var ZipmapFunc = function.New(&function.Spec{
|
|||
Impl: func(args []cty.Value, retType cty.Type) (ret cty.Value, err error) {
|
||||
keys := args[0]
|
||||
values := args[1]
|
||||
keys, keysMarks := keys.Unmark()
|
||||
values, valuesMarks := values.Unmark()
|
||||
|
||||
// All of our return paths must pass through the merged marks from
|
||||
// both the keys and the values, if any, using .WithMarks(retMarks)
|
||||
retMarks := cty.NewValueMarks(keysMarks, valuesMarks)
|
||||
|
||||
if !keys.IsWhollyKnown() {
|
||||
// Unknown map keys and object attributes are not supported, so
|
||||
// our entire result must be unknown in this case.
|
||||
return cty.UnknownVal(retType), nil
|
||||
return cty.UnknownVal(retType).WithMarks(retMarks), nil
|
||||
}
|
||||
|
||||
// both keys and values are guaranteed to be shallowly-known here,
|
||||
|
@ -1198,19 +1306,25 @@ var ZipmapFunc = function.New(&function.Spec{
|
|||
i := 0
|
||||
for it := keys.ElementIterator(); it.Next(); {
|
||||
_, v := it.Element()
|
||||
v, vMarks := v.Unmark()
|
||||
val := values.Index(cty.NumberIntVal(int64(i)))
|
||||
output[v.AsString()] = val
|
||||
|
||||
// We also need to accumulate the individual key marks on the
|
||||
// returned map, because keys can't carry marks on their own.
|
||||
retMarks = cty.NewValueMarks(retMarks, vMarks)
|
||||
|
||||
i++
|
||||
}
|
||||
|
||||
switch {
|
||||
case retType.IsMapType():
|
||||
if len(output) == 0 {
|
||||
return cty.MapValEmpty(retType.ElementType()), nil
|
||||
return cty.MapValEmpty(retType.ElementType()).WithMarks(retMarks), nil
|
||||
}
|
||||
return cty.MapVal(output), nil
|
||||
return cty.MapVal(output).WithMarks(retMarks), nil
|
||||
case retType.IsObjectType():
|
||||
return cty.ObjectVal(output), nil
|
||||
return cty.ObjectVal(output).WithMarks(retMarks), nil
|
||||
default:
|
||||
// Should never happen because the type-check function should've
|
||||
// caught any other case.
|
||||
|
|
|
@ -30,7 +30,7 @@ var CSVDecodeFunc = function.New(&function.Spec{
|
|||
return cty.DynamicPseudoType, fmt.Errorf("missing header line")
|
||||
}
|
||||
if err != nil {
|
||||
return cty.DynamicPseudoType, err
|
||||
return cty.DynamicPseudoType, csvError(err)
|
||||
}
|
||||
|
||||
atys := make(map[string]cty.Type, len(headers))
|
||||
|
@ -64,7 +64,7 @@ var CSVDecodeFunc = function.New(&function.Spec{
|
|||
break
|
||||
}
|
||||
if err != nil {
|
||||
return cty.DynamicVal, err
|
||||
return cty.DynamicVal, csvError(err)
|
||||
}
|
||||
|
||||
vals := make(map[string]cty.Value, len(cols))
|
||||
|
@ -91,3 +91,12 @@ var CSVDecodeFunc = function.New(&function.Spec{
|
|||
func CSVDecode(str cty.Value) (cty.Value, error) {
|
||||
return CSVDecodeFunc.Call([]cty.Value{str})
|
||||
}
|
||||
|
||||
func csvError(err error) error {
|
||||
switch err := err.(type) {
|
||||
case *csv.ParseError:
|
||||
return fmt.Errorf("CSV parse error on line %d: %w", err.Line, err.Err)
|
||||
default:
|
||||
return err
|
||||
}
|
||||
}
|
||||
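A sketch of the friendlier error this produces (illustrative; the exact wording depends on the underlying encoding/csv error):

package main

import (
	"fmt"

	"github.com/zclconf/go-cty/cty"
	"github.com/zclconf/go-cty/cty/function/stdlib"
)

func main() {
	// The second record has an unterminated quote, so encoding/csv returns
	// a *csv.ParseError, which csvError rewraps with the line number.
	_, err := stdlib.CSVDecode(cty.StringVal("a,b\n\"oops"))
	fmt.Println(err) // expected to read roughly: CSV parse error on line 2: ...
}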
|
|
|
@ -6,7 +6,7 @@ import (
|
|||
"math/big"
|
||||
"strings"
|
||||
|
||||
"github.com/apparentlymart/go-textseg/v12/textseg"
|
||||
"github.com/apparentlymart/go-textseg/v13/textseg"
|
||||
|
||||
"github.com/zclconf/go-cty/cty"
|
||||
"github.com/zclconf/go-cty/cty/convert"
|
||||
|
@ -114,6 +114,8 @@ var FormatListFunc = function.New(&function.Spec{
|
|||
continue
|
||||
}
|
||||
iterators[i] = arg.ElementIterator()
|
||||
case arg == cty.DynamicVal:
|
||||
unknowns[i] = true
|
||||
default:
|
||||
singleVals[i] = arg
|
||||
}
|
||||
|
|
|
@ -371,14 +371,21 @@ var CeilFunc = function.New(&function.Spec{
|
|||
},
|
||||
Type: function.StaticReturnType(cty.Number),
|
||||
Impl: func(args []cty.Value, retType cty.Type) (ret cty.Value, err error) {
|
||||
var val float64
|
||||
if err := gocty.FromCtyValue(args[0], &val); err != nil {
|
||||
return cty.UnknownVal(cty.String), err
|
||||
f := args[0].AsBigFloat()
|
||||
|
||||
if f.IsInf() {
|
||||
return cty.NumberVal(f), nil
|
||||
}
|
||||
if math.IsInf(val, 0) {
|
||||
return cty.NumberFloatVal(val), nil
|
||||
|
||||
i, acc := f.Int(nil)
|
||||
switch acc {
|
||||
case big.Exact, big.Above:
|
||||
// Done.
|
||||
case big.Below:
|
||||
i.Add(i, big.NewInt(1))
|
||||
}
|
||||
return cty.NumberIntVal(int64(math.Ceil(val))), nil
|
||||
|
||||
return cty.NumberVal(f.SetInt(i)), nil
|
||||
},
|
||||
})
|
||||
|
||||
|
@ -393,14 +400,21 @@ var FloorFunc = function.New(&function.Spec{
|
|||
},
|
||||
Type: function.StaticReturnType(cty.Number),
|
||||
Impl: func(args []cty.Value, retType cty.Type) (ret cty.Value, err error) {
|
||||
var val float64
|
||||
if err := gocty.FromCtyValue(args[0], &val); err != nil {
|
||||
return cty.UnknownVal(cty.String), err
|
||||
f := args[0].AsBigFloat()
|
||||
|
||||
if f.IsInf() {
|
||||
return cty.NumberVal(f), nil
|
||||
}
|
||||
if math.IsInf(val, 0) {
|
||||
return cty.NumberFloatVal(val), nil
|
||||
|
||||
i, acc := f.Int(nil)
|
||||
switch acc {
|
||||
case big.Exact, big.Below:
|
||||
// Done.
|
||||
case big.Above:
|
||||
i.Sub(i, big.NewInt(1))
|
||||
}
|
||||
return cty.NumberIntVal(int64(math.Floor(val))), nil
|
||||
|
||||
return cty.NumberVal(f.SetInt(i)), nil
|
||||
},
|
||||
})
|
||||
|
||||
|
|
|
@ -11,8 +11,9 @@ import (
|
|||
var ConcatFunc = function.New(&function.Spec{
|
||||
Params: []function.Parameter{},
|
||||
VarParam: &function.Parameter{
|
||||
Name: "seqs",
|
||||
Type: cty.DynamicPseudoType,
|
||||
Name: "seqs",
|
||||
Type: cty.DynamicPseudoType,
|
||||
AllowMarked: true,
|
||||
},
|
||||
Type: func(args []cty.Value) (ret cty.Type, err error) {
|
||||
if len(args) == 0 {
|
||||
|
@ -42,6 +43,10 @@ var ConcatFunc = function.New(&function.Spec{
|
|||
|
||||
etys := make([]cty.Type, 0, len(args))
|
||||
for i, val := range args {
|
||||
// Discard marks for nested values, as we only need to handle types
|
||||
// and lengths.
|
||||
val, _ := val.UnmarkDeep()
|
||||
|
||||
ety := val.Type()
|
||||
switch {
|
||||
case ety.IsTupleType():
|
||||
|
@ -75,6 +80,7 @@ var ConcatFunc = function.New(&function.Spec{
|
|||
// given values will be lists and that they will either be of
|
||||
// retType or of something we can convert to retType.
|
||||
vals := make([]cty.Value, 0, len(args))
|
||||
var markses []cty.ValueMarks // remember any marked lists we find
|
||||
for i, list := range args {
|
||||
list, err = convert.Convert(list, retType)
|
||||
if err != nil {
|
||||
|
@ -83,6 +89,11 @@ var ConcatFunc = function.New(&function.Spec{
|
|||
return cty.NilVal, function.NewArgError(i, err)
|
||||
}
|
||||
|
||||
list, listMarks := list.Unmark()
|
||||
if len(listMarks) > 0 {
|
||||
markses = append(markses, listMarks)
|
||||
}
|
||||
|
||||
it := list.ElementIterator()
|
||||
for it.Next() {
|
||||
_, v := it.Element()
|
||||
|
@ -90,10 +101,10 @@ var ConcatFunc = function.New(&function.Spec{
|
|||
}
|
||||
}
|
||||
if len(vals) == 0 {
|
||||
return cty.ListValEmpty(retType.ElementType()), nil
|
||||
return cty.ListValEmpty(retType.ElementType()).WithMarks(markses...), nil
|
||||
}
|
||||
|
||||
return cty.ListVal(vals), nil
|
||||
return cty.ListVal(vals).WithMarks(markses...), nil
|
||||
case retType.IsTupleType():
|
||||
// If retType is a tuple type then we could have a mixture of
|
||||
// lists and tuples but we know they all have known values
|
||||
|
@ -101,8 +112,14 @@ var ConcatFunc = function.New(&function.Spec{
|
|||
// concatenating them all together will produce a tuple of
|
||||
// retType because of the work we did in the Type function above.
|
||||
vals := make([]cty.Value, 0, len(args))
|
||||
var markses []cty.ValueMarks // remember any marked seqs we find
|
||||
|
||||
for _, seq := range args {
|
||||
seq, seqMarks := seq.Unmark()
|
||||
if len(seqMarks) > 0 {
|
||||
markses = append(markses, seqMarks)
|
||||
}
|
||||
|
||||
// Both lists and tuples support ElementIterator, so this is easy.
|
||||
it := seq.ElementIterator()
|
||||
for it.Next() {
|
||||
|
@ -111,7 +128,7 @@ var ConcatFunc = function.New(&function.Spec{
|
|||
}
|
||||
}
|
||||
|
||||
return cty.TupleVal(vals), nil
|
||||
return cty.TupleVal(vals).WithMarks(markses...), nil
|
||||
default:
|
||||
// should never happen if Type is working correctly above
|
||||
panic("unsupported return type")
|
||||
|
|
|
@ -6,7 +6,7 @@ import (
|
|||
"sort"
|
||||
"strings"
|
||||
|
||||
"github.com/apparentlymart/go-textseg/v12/textseg"
|
||||
"github.com/apparentlymart/go-textseg/v13/textseg"
|
||||
|
||||
"github.com/zclconf/go-cty/cty"
|
||||
"github.com/zclconf/go-cty/cty/function"
|
||||
|
@ -151,7 +151,6 @@ var SubstrFunc = function.New(&function.Spec{
|
|||
return cty.StringVal(""), nil
|
||||
}
|
||||
|
||||
|
||||
sub := in
|
||||
pos := 0
|
||||
var i int
|
||||
|
|
|
@ -27,14 +27,32 @@ type marker struct {
|
|||
type ValueMarks map[interface{}]struct{}
|
||||
|
||||
// NewValueMarks constructs a new ValueMarks set with the given mark values.
|
||||
//
|
||||
// If any of the arguments are already ValueMarks values then they'll be merged
|
||||
// into the result, rather than used directly as individual marks.
|
||||
func NewValueMarks(marks ...interface{}) ValueMarks {
|
||||
if len(marks) == 0 {
|
||||
return nil
|
||||
}
|
||||
ret := make(ValueMarks, len(marks))
|
||||
for _, v := range marks {
|
||||
if vm, ok := v.(ValueMarks); ok {
|
||||
// Constructing a new ValueMarks with an existing ValueMarks
|
||||
// implements a merge operation. (This can cause our result to
|
||||
// have a larger size than we expected, but that's okay.)
|
||||
for v := range vm {
|
||||
ret[v] = struct{}{}
|
||||
}
|
||||
continue
|
||||
}
|
||||
ret[v] = struct{}{}
|
||||
}
|
||||
if len(ret) == 0 {
|
||||
// If we were merging ValueMarks values together and they were all
|
||||
// empty then we'll avoid returning a zero-length map and return a
|
||||
// nil instead, as is conventional.
|
||||
return nil
|
||||
}
|
||||
return ret
|
||||
}
|
||||
|
||||
|
@ -180,6 +198,9 @@ func (val Value) Mark(mark interface{}) Value {
|
|||
for k, v := range mr.marks {
|
||||
newMarker.marks[k] = v
|
||||
}
|
||||
// unwrap the inner marked value, so we don't get multiple layers
|
||||
// of marking.
|
||||
newMarker.realV = mr.realV
|
||||
} else {
|
||||
// It's not a marker yet, so we're creating the first mark.
|
||||
newMarker.marks = make(ValueMarks, 1)
|
||||
|
|
|
@ -112,7 +112,7 @@ func (t typeObject) GoString() string {
|
|||
return "cty.EmptyObject"
|
||||
}
|
||||
if len(t.AttrOptional) > 0 {
|
||||
opt := make([]string, len(t.AttrOptional))
|
||||
var opt []string
|
||||
for k := range t.AttrOptional {
|
||||
opt = append(opt, k)
|
||||
}
|
||||
|
|
|
@ -52,6 +52,51 @@ func (t primitiveType) GoString() string {
|
|||
}
|
||||
}
|
||||
|
||||
// rawNumberEqual is our cty-specific definition of whether two big floats
|
||||
// underlying cty.Number are "equal" for the purposes of the Value.Equals and
|
||||
// Value.RawEquals methods.
|
||||
//
|
||||
// The built-in equality for big.Float is a direct comparison of the mantissa
|
||||
// bits and the exponent, but that's too precise a check for cty because we
|
||||
// routinely send numbers through decimal approximations and back and so
|
||||
// we only promise to accurately represent the subset of binary floating point
|
||||
// numbers that can be derived from a decimal string representation.
|
||||
//
|
||||
// In respect of the fact that cty only tries to preserve numbers that can
|
||||
// reasonably be written in JSON documents, we use the string representation of
|
||||
// a decimal approximation of the number as our comparison, relying on the
|
||||
// big.Float type's heuristic for discarding extraneous mantissa bits that seem
|
||||
// likely to only be there as a result of an earlier decimal-to-binary
|
||||
// approximation during parsing, e.g. in ParseNumberVal.
|
||||
func rawNumberEqual(a, b *big.Float) bool {
|
||||
switch {
|
||||
case (a == nil) != (b == nil):
|
||||
return false
|
||||
case a == nil: // b == nil too then, due to previous case
|
||||
return true
|
||||
default:
|
||||
// This format and precision matches that used by cty/json.Marshal,
|
||||
// and thus achieves our definition of "two numbers are equal if
|
||||
// we'd use the same JSON serialization for both of them".
|
||||
const format = 'f'
|
||||
const prec = -1
|
||||
aStr := a.Text(format, prec)
|
||||
bStr := b.Text(format, prec)
|
||||
|
||||
// The one exception to our rule about equality-by-stringification is
|
||||
// negative zero, because we want -0 to always be equal to +0.
|
||||
const posZero = "0"
|
||||
const negZero = "-0"
|
||||
if aStr == negZero {
|
||||
aStr = posZero
|
||||
}
|
||||
if bStr == negZero {
|
||||
bStr = posZero
|
||||
}
|
||||
return aStr == bStr
|
||||
}
|
||||
}
|
||||
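One observable consequence of the string-based comparison above, sketched against the public Equals path (which a later hunk routes through rawNumberEqual): negative zero now compares equal to positive zero.

package main

import (
	"fmt"
	"math"

	"github.com/zclconf/go-cty/cty"
)

func main() {
	negZero := cty.NumberFloatVal(math.Copysign(0, -1))

	// rawNumberEqual maps "-0" to "0" before comparing.
	fmt.Println(negZero.Equals(cty.Zero).True()) // expected: true
}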
|
||||
// Number is the numeric type. Number values are arbitrary-precision
|
||||
// decimal numbers, which can then be converted into Go's various numeric
|
||||
// types only if they are in the appropriate range.
|
||||
|
|
|
@ -115,6 +115,44 @@ func (t Type) HasDynamicTypes() bool {
|
|||
}
|
||||
}
|
||||
|
||||
// WithoutOptionalAttributesDeep returns a type equivalent to the receiver but
|
||||
// with any objects with optional attributes converted into fully concrete
|
||||
// object types. This operation is applied recursively.
|
||||
func (t Type) WithoutOptionalAttributesDeep() Type {
|
||||
switch {
|
||||
case t == DynamicPseudoType, t.IsPrimitiveType(), t.IsCapsuleType():
|
||||
return t
|
||||
case t.IsMapType():
|
||||
return Map(t.ElementType().WithoutOptionalAttributesDeep())
|
||||
case t.IsListType():
|
||||
return List(t.ElementType().WithoutOptionalAttributesDeep())
|
||||
case t.IsSetType():
|
||||
return Set(t.ElementType().WithoutOptionalAttributesDeep())
|
||||
case t.IsTupleType():
|
||||
originalElemTypes := t.TupleElementTypes()
|
||||
elemTypes := make([]Type, len(originalElemTypes))
|
||||
for i, et := range originalElemTypes {
|
||||
elemTypes[i] = et.WithoutOptionalAttributesDeep()
|
||||
}
|
||||
return Tuple(elemTypes)
|
||||
case t.IsObjectType():
|
||||
originalAttrTypes := t.AttributeTypes()
|
||||
attrTypes := make(map[string]Type, len(originalAttrTypes))
|
||||
for k, t := range originalAttrTypes {
|
||||
attrTypes[k] = t.WithoutOptionalAttributesDeep()
|
||||
}
|
||||
|
||||
// This is the subtle line which does all the work of this function: by
|
||||
// constructing a new Object type with these attribute types, we drop
|
||||
// the list of optional attributes (if present). This results in a
|
||||
// concrete Object type which requires all of the original attributes.
|
||||
return Object(attrTypes)
|
||||
default:
|
||||
// Should never happen, since above should be exhaustive
|
||||
panic("WithoutOptionalAttributesDeep does not support the given type")
|
||||
}
|
||||
}
|
||||
|
||||
type friendlyTypeNameMode rune
|
||||
|
||||
const (
|
||||
|
|
|
@ -186,6 +186,20 @@ func ListValEmpty(element Type) Value {
|
|||
}
|
||||
}
|
||||
|
||||
// CanListVal returns false if the given Values cannot be coalesced
|
||||
// into a single List due to inconsistent element types.
|
||||
func CanListVal(vals []Value) bool {
|
||||
elementType := DynamicPseudoType
|
||||
for _, val := range vals {
|
||||
if elementType == DynamicPseudoType {
|
||||
elementType = val.ty
|
||||
} else if val.ty != DynamicPseudoType && !elementType.Equals(val.ty) {
|
||||
return false
|
||||
}
|
||||
}
|
||||
return true
|
||||
}
|
||||
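A small sketch of the intended use (mirroring the conversion code earlier in this diff): check CanListVal before calling ListVal, which panics on mixed element types.

package main

import (
	"fmt"

	"github.com/zclconf/go-cty/cty"
)

func main() {
	ok := cty.CanListVal([]cty.Value{cty.StringVal("a"), cty.StringVal("b")})
	mixed := cty.CanListVal([]cty.Value{cty.StringVal("a"), cty.True})

	// The conversion code above uses this to return a path error instead of
	// letting cty.ListVal panic.
	fmt.Println(ok, mixed) // expected: true false
}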
|
||||
// MapVal returns a Value of a map type whose element type is defined by
|
||||
// the types of the given values, which must be homogeneous.
|
||||
//
|
||||
|
@ -227,6 +241,20 @@ func MapValEmpty(element Type) Value {
|
|||
}
|
||||
}
|
||||
|
||||
// CanMapVal returns false if the given Values cannot be coalesced into a
|
||||
// single Map due to inconsistent element types.
|
||||
func CanMapVal(vals map[string]Value) bool {
|
||||
elementType := DynamicPseudoType
|
||||
for _, val := range vals {
|
||||
if elementType == DynamicPseudoType {
|
||||
elementType = val.ty
|
||||
} else if val.ty != DynamicPseudoType && !elementType.Equals(val.ty) {
|
||||
return false
|
||||
}
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
// SetVal returns a Value of set type whose element type is defined by
|
||||
// the types of the given values, which must be homogenous.
|
||||
//
|
||||
|
@@ -267,6 +295,26 @@ func SetVal(vals []Value) Value {
	}.WithMarks(markSets...)
}

// CanSetVal returns false if the given Values can not be coalesced
// into a single Set due to inconsistent element types.
func CanSetVal(vals []Value) bool {
	elementType := DynamicPseudoType
	var markSets []ValueMarks

	for _, val := range vals {
		if unmarkedVal, marks := val.UnmarkDeep(); len(marks) > 0 {
			val = unmarkedVal
			markSets = append(markSets, marks)
		}
		if elementType == DynamicPseudoType {
			elementType = val.ty
		} else if val.ty != DynamicPseudoType && !elementType.Equals(val.ty) {
			return false
		}
	}
	return true
}

// SetValFromValueSet returns a Value of set type based on an already-constructed
// ValueSet.
//
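CanSetVal additionally strips marks (deeply) before comparing element types, so marked elements alone do not prevent coalescing into a Set. A small sketch, assuming a standalone package main:

package main

import (
	"fmt"

	"github.com/zclconf/go-cty/cty"
)

func main() {
	marked := []cty.Value{
		cty.StringVal("a").Mark("sensitive"), // marked, but still a String
		cty.StringVal("b"),
	}
	mixed := []cty.Value{cty.StringVal("a"), cty.True}

	fmt.Println(cty.CanSetVal(marked)) // expected: true (marks are ignored for type checking)
	fmt.Println(cty.CanSetVal(mixed))  // expected: false (String vs Bool)
}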
@@ -116,9 +116,9 @@ func (val Value) GoString() string {
// Use RawEquals to compare if two values are equal *ignoring* the
// short-circuit rules and the exception for null values.
func (val Value) Equals(other Value) Value {
	if val.IsMarked() || other.IsMarked() {
		val, valMarks := val.Unmark()
		other, otherMarks := other.Unmark()
	if val.ContainsMarked() || other.ContainsMarked() {
		val, valMarks := val.UnmarkDeep()
		other, otherMarks := other.UnmarkDeep()
		return val.Equals(other).WithMarks(valMarks, otherMarks)
	}
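Equals now unmarks deeply, so marks buried inside nested elements (not just on the top-level values) are stripped for the comparison and then reapplied to the boolean result. A hedged sketch, assuming a standalone package main:

package main

import (
	"fmt"

	"github.com/zclconf/go-cty/cty"
)

func main() {
	// Only the element is marked, not the list itself.
	a := cty.ListVal([]cty.Value{cty.StringVal("secret").Mark("sensitive")})
	b := cty.ListVal([]cty.Value{cty.StringVal("secret")})

	eq := a.Equals(b)
	fmt.Println(eq.IsMarked()) // expected: true, the nested mark propagates to the result

	unmarked, marks := eq.Unmark()
	fmt.Println(unmarked.True()) // expected: true, equal once marks are stripped
	_, sensitive := marks["sensitive"]
	fmt.Println(sensitive) // expected: true
}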
@@ -191,7 +191,7 @@ func (val Value) Equals(other Value) Value {

	switch {
	case ty == Number:
		result = val.v.(*big.Float).Cmp(other.v.(*big.Float)) == 0
		result = rawNumberEqual(val.v.(*big.Float), other.v.(*big.Float))
	case ty == Bool:
		result = val.v.(bool) == other.v.(bool)
	case ty == String:
@@ -492,18 +492,23 @@ func (val Value) RawEquals(other Value) bool {

	case ty.IsMapType():
		ety := ty.typeImpl.(typeMap).ElementTypeT
		if len(val.v.(map[string]interface{})) == len(other.v.(map[string]interface{})) {
			for k := range val.v.(map[string]interface{}) {
				if _, ok := other.v.(map[string]interface{})[k]; !ok {
		if !val.HasSameMarks(other) {
			return false
		}
		valUn, _ := val.Unmark()
		otherUn, _ := other.Unmark()
		if len(valUn.v.(map[string]interface{})) == len(otherUn.v.(map[string]interface{})) {
			for k := range valUn.v.(map[string]interface{}) {
				if _, ok := otherUn.v.(map[string]interface{})[k]; !ok {
					return false
				}
				lhs := Value{
					ty: ety,
					v: val.v.(map[string]interface{})[k],
					v: valUn.v.(map[string]interface{})[k],
				}
				rhs := Value{
					ty: ety,
					v: other.v.(map[string]interface{})[k],
					v: otherUn.v.(map[string]interface{})[k],
				}
				eq := lhs.RawEquals(rhs)
				if !eq {
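For map values, RawEquals now requires both values to carry the same mark set and unmarks them before comparing entries, instead of type-asserting the (possibly mark-wrapped) internal value directly. A rough sketch of the observable behaviour, assuming a standalone package main:

package main

import (
	"fmt"

	"github.com/zclconf/go-cty/cty"
)

func main() {
	m := cty.MapVal(map[string]cty.Value{"k": cty.StringVal("v")})

	fmt.Println(m.RawEquals(m))                                     // expected: true
	fmt.Println(m.RawEquals(m.Mark("sensitive")))                   // expected: false, mark sets differ
	fmt.Println(m.Mark("sensitive").RawEquals(m.Mark("sensitive"))) // expected: true, same content and marks
}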
@@ -1278,9 +1283,7 @@ func (val Value) AsBigFloat() *big.Float {
	}

	// Copy the float so that callers can't mutate our internal state
	ret := *(val.v.(*big.Float))

	return &ret
	return new(big.Float).Copy(val.v.(*big.Float))
}

// AsValueSlice returns a []cty.Value representation of a non-null, non-unknown
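The shallow struct copy is replaced with big.Float.Copy so the returned value really is independent of the Value's internal state. A quick check of the defensive-copy behaviour, assuming a standalone package main:

package main

import (
	"fmt"

	"github.com/zclconf/go-cty/cty"
)

func main() {
	n := cty.NumberIntVal(10)

	f := n.AsBigFloat()
	f.SetInt64(99) // mutate only the returned copy

	fmt.Println(f.String())              // 99
	fmt.Println(n.AsBigFloat().String()) // expected: 10, the Value itself is untouched
}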
@@ -33,10 +33,15 @@ func walk(path Path, val Value, cb func(Path, Value) (bool, error)) error {
		return nil
	}

	// The callback already got a chance to see the mark in our
	// call above, so can safely strip it off here in order to
	// visit the child elements, which might still have their own marks.
	rawVal, _ := val.Unmark()

	ty := val.Type()
	switch {
	case ty.IsObjectType():
		for it := val.ElementIterator(); it.Next(); {
		for it := rawVal.ElementIterator(); it.Next(); {
			nameVal, av := it.Element()
			path := append(path, GetAttrStep{
				Name: nameVal.AsString(),
@@ -46,8 +51,8 @@ func walk(path Path, val Value, cb func(Path, Value) (bool, error)) error {
				return err
			}
		}
	case val.CanIterateElements():
		for it := val.ElementIterator(); it.Next(); {
	case rawVal.CanIterateElements():
		for it := rawVal.ElementIterator(); it.Next(); {
			kv, ev := it.Element()
			path := append(path, IndexStep{
				Key: kv,
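walk now iterates over an unmarked copy of each value, so cty.Walk can descend into collections that carry marks (ElementIterator would otherwise panic on a marked value) while the callback still sees the marked original. A hedged sketch, assuming a standalone package main:

package main

import (
	"fmt"

	"github.com/zclconf/go-cty/cty"
)

func main() {
	// The list itself is marked; its elements are not.
	v := cty.ListVal([]cty.Value{
		cty.StringVal("a"),
		cty.StringVal("b"),
	}).Mark("sensitive")

	err := cty.Walk(v, func(p cty.Path, v cty.Value) (bool, error) {
		fmt.Println(len(p), v.Type().FriendlyName(), v.IsMarked())
		return true, nil
	})
	if err != nil {
		fmt.Println("walk error:", err)
	}
	// Expected output, roughly:
	//   0 list of string true
	//   1 string false
	//   1 string false
}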
@@ -134,6 +139,12 @@ func transform(path Path, val Value, t Transformer) (Value, error) {
	ty := val.Type()
	var newVal Value

	// We need to peel off any marks here so that we can dig around
	// inside any collection values. We'll reapply these to any
	// new collections we construct, but the transformer's Exit
	// method gets the final say on what to do with those.
	rawVal, marks := val.Unmark()

	switch {

	case val.IsNull() || !val.IsKnown():
@@ -141,14 +152,14 @@ func transform(path Path, val Value, t Transformer) (Value, error) {
		newVal = val

	case ty.IsListType() || ty.IsSetType() || ty.IsTupleType():
		l := val.LengthInt()
		l := rawVal.LengthInt()
		switch l {
		case 0:
			// No deep transform for an empty sequence
			newVal = val
		default:
			elems := make([]Value, 0, l)
			for it := val.ElementIterator(); it.Next(); {
			for it := rawVal.ElementIterator(); it.Next(); {
				kv, ev := it.Element()
				path := append(path, IndexStep{
					Key: kv,
@@ -161,25 +172,25 @@ func transform(path Path, val Value, t Transformer) (Value, error) {
			}
			switch {
			case ty.IsListType():
				newVal = ListVal(elems)
				newVal = ListVal(elems).WithMarks(marks)
			case ty.IsSetType():
				newVal = SetVal(elems)
				newVal = SetVal(elems).WithMarks(marks)
			case ty.IsTupleType():
				newVal = TupleVal(elems)
				newVal = TupleVal(elems).WithMarks(marks)
			default:
				panic("unknown sequence type") // should never happen because of the case we are in
			}
		}

	case ty.IsMapType():
		l := val.LengthInt()
		l := rawVal.LengthInt()
		switch l {
		case 0:
			// No deep transform for an empty map
			newVal = val
		default:
			elems := make(map[string]Value)
			for it := val.ElementIterator(); it.Next(); {
			for it := rawVal.ElementIterator(); it.Next(); {
				kv, ev := it.Element()
				path := append(path, IndexStep{
					Key: kv,
@@ -190,7 +201,7 @@ func transform(path Path, val Value, t Transformer) (Value, error) {
				}
				elems[kv.AsString()] = newEv
			}
			newVal = MapVal(elems)
			newVal = MapVal(elems).WithMarks(marks)
		}

	case ty.IsObjectType():
@@ -212,7 +223,7 @@ func transform(path Path, val Value, t Transformer) (Value, error) {
				}
				newAVs[name] = newAV
			}
			newVal = ObjectVal(newAVs)
			newVal = ObjectVal(newAVs).WithMarks(marks)
		}

	default:
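transform now rebuilds collections from an unmarked copy and reapplies the original marks via WithMarks, so cty.Transform no longer loses the marks carried by the containers it rewrites. A hedged end-to-end sketch, assuming a standalone package main:

package main

import (
	"fmt"
	"strings"

	"github.com/zclconf/go-cty/cty"
)

func main() {
	// A marked list of strings.
	v := cty.ListVal([]cty.Value{
		cty.StringVal("a"),
		cty.StringVal("b"),
	}).Mark("sensitive")

	// Upper-case every string leaf; container values are passed through as-is.
	out, err := cty.Transform(v, func(p cty.Path, v cty.Value) (cty.Value, error) {
		if v.Type().Equals(cty.String) && !v.IsMarked() {
			return cty.StringVal(strings.ToUpper(v.AsString())), nil
		}
		return v, nil
	})
	if err != nil {
		fmt.Println("transform error:", err)
		return
	}

	fmt.Println(out.IsMarked()) // expected: true, the list's mark survives the transform
}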
@@ -16,6 +16,8 @@ github.com/agl/ed25519/edwards25519
github.com/apparentlymart/go-cidr/cidr
# github.com/apparentlymart/go-textseg/v12 v12.0.0
github.com/apparentlymart/go-textseg/v12/textseg
# github.com/apparentlymart/go-textseg/v13 v13.0.0
github.com/apparentlymart/go-textseg/v13/textseg
# github.com/beorn7/perks v1.0.1
github.com/beorn7/perks/quantile
# github.com/bugsnag/bugsnag-go v1.4.1
@@ -432,7 +434,7 @@ github.com/xeipuuv/gojsonpointer
github.com/xeipuuv/gojsonreference
# github.com/xeipuuv/gojsonschema v1.2.0
github.com/xeipuuv/gojsonschema
# github.com/zclconf/go-cty v1.7.1
# github.com/zclconf/go-cty v1.10.0
## explicit
github.com/zclconf/go-cty/cty
github.com/zclconf/go-cty/cty/convert