Clarify doc for Color.hex and C# Color()
The "alpha channel first" wording seems misleading to me. It doesn't match the examples, so remove it. Add a more detailed specification of the expected number format in hex.
This commit is contained in:
parent
8f25cc2d13
commit
779ac20bb9
2 changed files with 6 additions and 6 deletions
|
|
@@ -556,7 +556,7 @@ namespace Godot
|
|||
/// Constructs a <see cref="Color"/> from an unsigned 32-bit integer in RGBA format
|
||||
/// (each byte represents a color channel).
|
||||
/// </summary>
|
||||
/// <param name="rgba">The <see langword="uint"/> representing the color.</param>
|
||||
/// <param name="rgba">The <see langword="uint"/> representing the color as 0xRRGGBBAA.</param>
|
||||
public Color(uint rgba)
|
||||
{
|
||||
A = (rgba & 0xFF) / 255.0f;
|
||||
|
|
@@ -572,7 +572,7 @@ namespace Godot
|
|||
/// Constructs a <see cref="Color"/> from an unsigned 64-bit integer in RGBA format
|
||||
/// (each word represents a color channel).
|
||||
/// </summary>
|
||||
/// <param name="rgba">The <see langword="ulong"/> representing the color.</param>
|
||||
/// <param name="rgba">The <see langword="ulong"/> representing the color as 0xRRRRGGGGBBBBAAAA.</param>
|
||||
public Color(ulong rgba)
|
||||
{
|
||||
A = (rgba & 0xFFFF) / 65535.0f;
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue